diff --git a/.github/ISSUE_TEMPLATE/Bug_report.md b/.github/ISSUE_TEMPLATE/Bug_report.md index de564e58..2071195d 100644 --- a/.github/ISSUE_TEMPLATE/Bug_report.md +++ b/.github/ISSUE_TEMPLATE/Bug_report.md @@ -3,28 +3,63 @@ name: Bug report about: Report a bug for this project --- + -**README and remove me** -For community support and other discussion, you are welcome to visit and stay with us @ Freenode, #mailcow -Answering can take a few seconds up to many hours, please be patient. -Commercial support, including a ticket system, can be found @ https://www.servercow.de/mailcow#support - we are also available via Telegram. \o/ +**Prior to placing the issue, please check the following:** *(fill out each checkbox with a `X` once done)* +- [ ] I understand that not following the instructions below might result in immediate closing and deletion of my issue. +- [ ] I have understood that answers are voluntary and community-driven, and not commercial support. +- [ ] I have verified that my issue has not already been answered in the past. I also checked previous [issues](https://github.com/mailcow/mailcow-dockerized/issues). -**Describe the bug, try to make it reproducible** -A clear and concise description of what the bug is. How can it be reproduced? -If applicable, add screenshots to help explain your problem. Very useful for bugs in mailcow UI. +--- -**System information and quick debugging** -General logs: -- Please take a look at the [documentation](https://mailcow.github.io/mailcow-dockerized-docs/debug-logs/). +**Description of the bug**: What kind of issue have you *exactly* come across? + + +My issue is... + +**Reproduction of said bug**: How *exactly* do you reproduce the bug? + + +1. I go to... +2. And then to... +3. But once I do... + +__I have tried or I do...__ *(fill out each checkbox with a `X` if applicable)* +- [ ] In case of a WebUI issue, I have tried clearing the browser cache and the issue persists. +- [ ] I do run mailcow on a Synology, QNAP or any other sort of NAS. + +**System information** + Further information (where applicable): - - Your OS (is Apparmor or SELinux active?) - - Your virtualization technology (KVM/QEMU, Xen, VMware, VirtualBox etc.) - - Your server/VM specifications (Memory, CPU Cores) - - Don't try to run mailcow on a Synology or QNAP NAS, do you? - - Docker and Docker Compose versions - - Output of `git diff origin/master`, any other changes to the code? + +| Question | Answer | +| --- | --- | +| My operating system | I_DO_REPLY_HERE | +| Is Apparmor, SELinux or similar active? | I_DO_REPLY_HERE | +| Virtualization technology (KVM, VMware, Xen, etc.) | I_DO_REPLY_HERE | +| Server/VM specifications (Memory, CPU Cores) | I_DO_REPLY_HERE | +| Docker Version (`docker version`) | I_DO_REPLY_HERE | +| Docker-Compose Version (`docker-compose version`) | I_DO_REPLY_HERE | +| Reverse proxy (custom solution) | I_DO_REPLY_HERE | + +Further notes: + - Output of `git diff origin/master`, any other changes to the code? If so, please post them. - All third-party firewalls and custom iptables rules are unsupported. Please check the Docker docs about how to use Docker with your own ruleset. Nevertheless, iptables output can help _us_ to help _you_: `iptables -L -vn`, `ip6tables -L -vn`, `iptables -L -vn -t nat` and `ip6tables -L -vn -t nat ` - - Reverse proxy? If you think this problem is related to your reverse proxy, please post your configuration. - - Browser (if it's a Web UI issue) - please clean your browser cache and try again, problem persists?
- Check `docker exec -it $(docker ps -qf name=acme-mailcow) dig +short stackoverflow.com @172.22.1.254` (set the IP accordingly, if you changed the internal mailcow network) and `docker exec -it $(docker ps -qf name=acme-mailcow) dig +short stackoverflow.com @1.1.1.1` - output? Timeout? + + General logs: +- Please take a look at the [official documentation](https://mailcow.github.io/mailcow-dockerized-docs/debug-logs/). diff --git a/.gitignore b/.gitignore index 624e1c06..1e2733f5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,12 @@ rebuild-images.sh data/conf/sogo/sieve.creds +data/conf/phpfpm/sogo-sso/sogo-sso.pass data/conf/dovecot/dovecot-master.passwd data/conf/dovecot/dovecot-master.userdb mailcow.conf mailcow.conf_backup data/conf/nginx/*.active +data/conf/postfix/extra.cf data/conf/postfix/sql data/conf/postfix/allow_mailcow_local.regexp data/conf/dovecot/sql @@ -24,11 +26,15 @@ data/conf/nginx/*.custom data/conf/nginx/*.bak data/conf/dovecot/acl_anyone data/conf/dovecot/mail_plugins* +data/conf/dovecot/sogo-sso.conf data/conf/dovecot/extra.conf +data/conf/dovecot/shared_namespace.conf data/conf/rspamd/custom/* data/conf/portainer/ data/gitea/ data/gogs/ data/conf/sogo/plist_ldap +update_diffs/ .github/ docker-compose.override.yml +refresh_images.sh diff --git a/README.md b/README.md index ec500d9c..356a2f8a 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ ## Want to support mailcow? -Donate via **PayPal** [![Donate](https://www.paypalobjects.com/en_US/i/btn/btn_donate_LG.gif)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=JWBSYHF4SMC68) or via **Liberapay** [![Liberapay.com](https://mailcow.email/img/lp.png)](https://liberapay.com/mailcow) +Please [consider a support contract (around 30 € per month) with Servercow](https://www.servercow.de/mailcow#support) to support further development. _We_ support _you_ while _you_ support _us_. :) Or just spread the word: moo. 
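For reference, the checks requested by the bug report template above can be collected in one pass. A minimal sketch, assuming the default container name filter (`acme-mailcow`) and the default internal resolver IP 172.22.1.254 mentioned in the template; adjust both if the internal mailcow network was changed:

```bash
#!/usr/bin/env bash
# Gather the debug output requested by the bug report template.
# Assumes default container names and the default internal resolver
# 172.22.1.254; both may differ on customized installations.

# DNS resolution from inside the acme container, once via the internal
# resolver and once via a public resolver for comparison:
docker exec -it $(docker ps -qf name=acme-mailcow) dig +short stackoverflow.com @172.22.1.254
docker exec -it $(docker ps -qf name=acme-mailcow) dig +short stackoverflow.com @1.1.1.1

# Firewall state (third-party rulesets are unsupported, but this output
# helps with troubleshooting):
iptables -L -vn
ip6tables -L -vn
iptables -L -vn -t nat
ip6tables -L -vn -t nat

# Any local changes against the upstream master branch:
git diff origin/master
```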
diff --git a/data/Dockerfiles/acme/Dockerfile b/data/Dockerfiles/acme/Dockerfile index a17064fe..dd872dff 100644 --- a/data/Dockerfiles/acme/Dockerfile +++ b/data/Dockerfiles/acme/Dockerfile @@ -1,8 +1,9 @@ -FROM alpine:3.9 +FROM alpine:3.10 LABEL maintainer "Andre Peters " -RUN apk add --update --no-cache \ +RUN apk upgrade --no-cache \ + && apk add --update --no-cache \ bash \ curl \ openssl \ @@ -12,9 +13,9 @@ RUN apk add --update --no-cache \ redis \ tini \ tzdata \ - py-pip \ - && pip install --upgrade pip \ - && pip install acme-tiny + python3 \ + && python3 -m pip install --upgrade pip \ + && python3 -m pip install acme-tiny COPY docker-entrypoint.sh /srv/docker-entrypoint.sh COPY expand6.sh /srv/expand6.sh diff --git a/data/Dockerfiles/acme/docker-entrypoint.sh b/data/Dockerfiles/acme/docker-entrypoint.sh index bb9a5a53..a06ce01c 100755 --- a/data/Dockerfiles/acme/docker-entrypoint.sh +++ b/data/Dockerfiles/acme/docker-entrypoint.sh @@ -5,6 +5,21 @@ exec 5>&1 # Thanks to https://github.com/cvmiller -> https://github.com/cvmiller/expand6 source /srv/expand6.sh +# Skipping IP check when we like to live dangerously +if [[ "${SKIP_IP_CHECK}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then + SKIP_IP_CHECK=y +fi + +# Skipping HTTP check when we like to live dangerously +if [[ "${SKIP_HTTP_VERIFICATION}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then + SKIP_HTTP_VERIFICATION=y +fi + +# Request certificate for MAILCOW_HOSTNAME only +if [[ "${ONLY_MAILCOW_HOSTNAME}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then + ONLY_MAILCOW_HOSTNAME=y +fi + log_f() { if [[ ${2} == "no_nl" ]]; then echo -n "$(date) - ${1}" @@ -42,7 +57,6 @@ mkdir -p ${ACME_BASE}/acme [[ -f ${ACME_BASE}/acme/private/privkey.pem ]] && mv ${ACME_BASE}/acme/private/privkey.pem ${ACME_BASE}/acme/key.pem [[ -f ${ACME_BASE}/acme/private/account.key ]] && mv ${ACME_BASE}/acme/private/account.key ${ACME_BASE}/acme/account.pem - reload_configurations(){ # Reading container IDs # Wrapping as array to ensure trimmed content when calling $NGINX etc. @@ -118,21 +132,25 @@ get_ipv6(){ } verify_challenge_path(){ + if [[ ${SKIP_HTTP_VERIFICATION} == "y" ]]; then + echo '(skipping check, returning 0)' + return 0 + fi # verify_challenge_path URL 4|6 - RAND_FILE=${RANDOM}${RANDOM}${RANDOM} - touch /var/www/acme/${RAND_FILE} - if [[ "$(curl -${2} http://${1}/.well-known/acme-challenge/${RAND_FILE} --write-out %{http_code} --silent --output /dev/null)" =~ ^(2|3) ]]; then - rm /var/www/acme/${RAND_FILE} + RANDOM_N=${RANDOM}${RANDOM}${RANDOM} + echo ${RANDOM_N} > /var/www/acme/${RANDOM_N} + if [[ "$(curl --insecure -${2} -L http://${1}/.well-known/acme-challenge/${RANDOM_N} --silent)" == "${RANDOM_N}" ]]; then + rm /var/www/acme/${RANDOM_N} return 0 else - rm /var/www/acme/${RAND_FILE} + rm /var/www/acme/${RANDOM_N} return 1 fi } [[ ! -f ${ACME_BASE}/dhparams.pem ]] && cp ${SSL_EXAMPLE}/dhparams.pem ${ACME_BASE}/dhparams.pem -if [[ -f ${ACME_BASE}/cert.pem ]] && [[ -f ${ACME_BASE}/key.pem ]]; then +if [[ -f ${ACME_BASE}/cert.pem ]] && [[ -f ${ACME_BASE}/key.pem ]] && [[ $(stat -c%s ${ACME_BASE}/cert.pem) != 0 ]]; then ISSUER=$(openssl x509 -in ${ACME_BASE}/cert.pem -noout -issuer) if [[ ${ISSUER} != *"Let's Encrypt"* && ${ISSUER} != *"mailcow"* && ${ISSUER} != *"Fake LE Intermediate"* ]]; then log_f "Found certificate with issuer other than mailcow snake-oil CA and Let's Encrypt, skipping ACME client..." @@ -156,6 +174,7 @@ else exec env TRIGGER_RESTART=1 $(readlink -f "$0") fi fi +chmod 600 ${ACME_BASE}/key.pem log_f "Waiting for database... " no_nl while !
mysqladmin status --socket=/var/run/mysqld/mysqld.sock -u${DBUSER} -p${DBPASS} --silent; do @@ -196,10 +215,8 @@ while true; do log_f "Using existing Lets Encrypt account key ${ACME_BASE}/acme/account.pem" fi - # Skipping IP check when we like to live dangerously - if [[ "${SKIP_IP_CHECK}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then - SKIP_IP_CHECK=y - fi + chmod 600 ${ACME_BASE}/acme/key.pem + chmod 600 ${ACME_BASE}/acme/account.pem # Cleaning up and init validation arrays unset SQL_DOMAIN_ARR @@ -228,7 +245,7 @@ while true; do ADDITIONAL_SAN_ARR+=($i) fi done - ADDITIONAL_WC_ARR+=('autodiscover') + ADDITIONAL_WC_ARR+=('autodiscover' 'autoconfig') # Start IP detection log_f "Detecting IP addresses... " no_nl @@ -255,9 +272,10 @@ while true; do SQL_DOMAIN_ARR+=("${domains}") done < <(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT domain FROM domain WHERE backupmx=0" -Bs) + if [[ ${ONLY_MAILCOW_HOSTNAME} != "y" ]]; then for SQL_DOMAIN in "${SQL_DOMAIN_ARR[@]}"; do for SUBDOMAIN in "${ADDITIONAL_WC_ARR[@]}"; do - if [[ "${SUBDOMAIN}.${SQL_DOMAIN}" != "${MAILCOW_HOSTNAME}" ]]; then + if [[ "${SUBDOMAIN}.${SQL_DOMAIN}" != "${MAILCOW_HOSTNAME}" ]]; then A_SUBDOMAIN=$(dig A ${SUBDOMAIN}.${SQL_DOMAIN} +short | tail -n 1) AAAA_SUBDOMAIN=$(dig AAAA ${SUBDOMAIN}.${SQL_DOMAIN} +short | tail -n 1) # Check if CNAME without v6 enabled target @@ -268,10 +286,10 @@ while true; do log_f "Found AAAA record for ${SUBDOMAIN}.${SQL_DOMAIN}: ${AAAA_SUBDOMAIN} - skipping A record check" if [[ $(expand ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}) == $(expand ${AAAA_SUBDOMAIN}) ]] || [[ ${SKIP_IP_CHECK} == "y" ]]; then if verify_challenge_path "${SUBDOMAIN}.${SQL_DOMAIN}" 6; then - log_f "Confirmed AAAA record ${AAAA_SUBDOMAIN}" + log_f "Confirmed AAAA record with IP ${AAAA_SUBDOMAIN}, adding SAN" VALIDATED_CONFIG_DOMAINS+=("${SUBDOMAIN}.${SQL_DOMAIN}") else - log_f "Confirmed AAAA record ${AAAA_SUBDOMAIN}, but HTTP validation failed" + log_f "Confirmed AAAA record with IP ${AAAA_SUBDOMAIN}, but HTTP validation failed" fi else log_f "Cannot match your IP ${IPV6:-NO_IPV6_LINK} against hostname ${SUBDOMAIN}.${SQL_DOMAIN} ($(expand ${AAAA_SUBDOMAIN}))" @@ -280,10 +298,10 @@ while true; do log_f "Found A record for ${SUBDOMAIN}.${SQL_DOMAIN}: ${A_SUBDOMAIN}" if [[ ${IPV4:-ERR} == ${A_SUBDOMAIN} ]] || [[ ${SKIP_IP_CHECK} == "y" ]]; then if verify_challenge_path "${SUBDOMAIN}.${SQL_DOMAIN}" 4; then - log_f "Confirmed A record ${A_SUBDOMAIN}" + log_f "Confirmed A record ${A_SUBDOMAIN}, adding SAN" VALIDATED_CONFIG_DOMAINS+=("${SUBDOMAIN}.${SQL_DOMAIN}") else - log_f "Confirmed AAAA record ${A_SUBDOMAIN}, but HTTP validation failed" + log_f "Confirmed A record with IP ${A_SUBDOMAIN}, but HTTP validation failed" fi else log_f "Cannot match your IP ${IPV4} against hostname ${SUBDOMAIN}.${SQL_DOMAIN} (${A_SUBDOMAIN})" @@ -294,6 +312,7 @@ while true; do fi done done + fi A_MAILCOW_HOSTNAME=$(dig A ${MAILCOW_HOSTNAME} +short | tail -n 1) AAAA_MAILCOW_HOSTNAME=$(dig AAAA ${MAILCOW_HOSTNAME} +short | tail -n 1) @@ -308,10 +327,10 @@ while true; do log_f "Confirmed AAAA record ${AAAA_MAILCOW_HOSTNAME}" VALIDATED_MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME} else - log_f "Confirmed AAAA record ${A_MAILCOW_HOSTNAME}, but HTTP validation failed" + log_f "Confirmed AAAA record with IP ${AAAA_MAILCOW_HOSTNAME}, but HTTP validation failed" fi else - log_f "Cannot match your IP ${IPV6:-NO_IPV6_LINK} against hostname ${MAILCOW_HOSTNAME} ($(expand ${AAAA_MAILCOW_HOSTNAME}))" + log_f "Cannot match your IP 
${IPV6:-NO_IPV6_LINK} against hostname ${MAILCOW_HOSTNAME} (DNS returned $(expand ${AAAA_MAILCOW_HOSTNAME}))" fi elif [[ ! -z ${A_MAILCOW_HOSTNAME} ]]; then log_f "Found A record for ${MAILCOW_HOSTNAME}: ${A_MAILCOW_HOSTNAME}" @@ -320,15 +339,16 @@ while true; do log_f "Confirmed A record ${A_MAILCOW_HOSTNAME}" VALIDATED_MAILCOW_HOSTNAME=${MAILCOW_HOSTNAME} else - log_f "Confirmed A record ${A_MAILCOW_HOSTNAME}, but HTTP validation failed" + log_f "Confirmed A record with IP ${A_MAILCOW_HOSTNAME}, but HTTP validation failed" fi else - log_f "Cannot match your IP ${IPV4} against hostname ${MAILCOW_HOSTNAME} (${A_MAILCOW_HOSTNAME})" + log_f "Cannot match your IP ${IPV4} against hostname ${MAILCOW_HOSTNAME} (DNS returned ${A_MAILCOW_HOSTNAME})" fi else log_f "No A or AAAA record found for hostname ${MAILCOW_HOSTNAME}" fi + if [[ ${ONLY_MAILCOW_HOSTNAME} != "y" ]]; then for SAN in "${ADDITIONAL_SAN_ARR[@]}"; do # Skip on CAA errors for SAN SAN_PARENT_DOMAIN=$(echo ${SAN} | cut -d. -f2-) @@ -354,13 +374,13 @@ while true; do log_f "Found AAAA record for ${SAN}: ${AAAA_SAN} - skipping A record check" if [[ $(expand ${IPV6:-"0000:0000:0000:0000:0000:0000:0000:0000"}) == $(expand ${AAAA_SAN}) ]] || [[ ${SKIP_IP_CHECK} == "y" ]]; then if verify_challenge_path "${SAN}" 6; then - log_f "Confirmed AAAA record ${AAAA_SAN}" + log_f "Confirmed AAAA record with IP ${AAAA_SAN}" ADDITIONAL_VALIDATED_SAN+=("${SAN}") else - log_f "Confirmed AAAA record ${AAAA_SAN}, but HTTP validation failed" + log_f "Confirmed AAAA record with IP ${AAAA_SAN}, but HTTP validation failed" fi else - log_f "Cannot match your IP ${IPV6:-NO_IPV6_LINK} against hostname ${SAN} ($(expand ${AAAA_SAN}))" + log_f "Cannot match your IP ${IPV6:-NO_IPV6_LINK} against hostname ${SAN} (DNS returned $(expand ${AAAA_SAN}))" fi elif [[ ! -z ${A_SAN} ]]; then log_f "Found A record for ${SAN}: ${A_SAN}" @@ -369,21 +389,23 @@ while true; do log_f "Confirmed A record ${A_SAN}" ADDITIONAL_VALIDATED_SAN+=("${SAN}") else - log_f "Confirmed A record ${A_SAN}, but HTTP validation failed" + log_f "Confirmed A record with IP ${A_SAN}, but HTTP validation failed" fi else - log_f "Cannot match your IP ${IPV4} against hostname ${SAN} (${A_SAN})" + log_f "Cannot match your IP ${IPV4} against hostname ${SAN} (DNS returned ${A_SAN})" fi else log_f "No A or AAAA record found for hostname ${SAN}" fi done + fi # Unique elements ALL_VALIDATED=(${VALIDATED_MAILCOW_HOSTNAME} $(echo ${VALIDATED_CONFIG_DOMAINS[*]} ${ADDITIONAL_VALIDATED_SAN[*]} | xargs -n1 | sort -u | xargs)) if [[ -z ${ALL_VALIDATED[*]} ]]; then log_f "Cannot validate hostnames, skipping Let's Encrypt for 1 hour." log_f "Use SKIP_LETS_ENCRYPT=y in mailcow.conf to skip it permanently." + redis-cli -h redis SET ACME_FAIL_TIME "$(date +%s)" sleep 1h exec $(readlink -f "$0") fi @@ -397,19 +419,19 @@ while true; do # Finding difference in SAN array now vs. SAN array by current configuration array_diff ORPHANED_SAN SAN_ARRAY_NOW ALL_VALIDATED if [[ ! -z ${ORPHANED_SAN[*]} ]]; then - log_f "Found orphaned SANs ${ORPHANED_SAN[*]}" + log_f "Found orphaned SAN ${ORPHANED_SAN[*]}" SAN_CHANGE=1 fi array_diff ADDED_SAN ALL_VALIDATED SAN_ARRAY_NOW if [[ ! -z ${ADDED_SAN[*]} ]]; then - log_f "Found new SANs ${ADDED_SAN[*]}" + log_f "Found new SAN ${ADDED_SAN[*]}" SAN_CHANGE=1 fi if [[ ${SAN_CHANGE} == 0 ]]; then # Certificate did not change but could be due for renewal (4 weeks) - if ! openssl x509 -checkend 1209600 -noout -in ${ACME_BASE}/cert.pem; then - log_f "Certificate is due for renewal (< 2 weeks)" + if ! 
openssl x509 -checkend 2592000 -noout -in ${ACME_BASE}/cert.pem; then + log_f "Certificate is due for renewal (< 30 days)" else log_f "Certificate validation done, neither changed nor due for renewal, sleeping for another day." sleep 1d @@ -462,7 +484,7 @@ while true; do cp ${ACME_BASE}/acme/cert.pem ${ACME_BASE}/cert.pem cp ${ACME_BASE}/acme/key.pem ${ACME_BASE}/key.pem reload_configurations - rm /var/www/acme/* + rm /var/www/acme/* 2> /dev/null log_f "Certificate successfully deployed, removing backup, sleeping 1d" sleep 1d else @@ -476,6 +498,7 @@ while true; do ACME_RESPONSE_B64=$(echo "${ACME_RESPONSE}" | openssl enc -e -A -base64) log_f "${ACME_RESPONSE_B64}" redis_only b64 log_f "Retrying in 30 minutes..." + redis-cli -h redis SET ACME_FAIL_TIME "$(date +%s)" sleep 30m exec $(readlink -f "$0") ;; diff --git a/data/Dockerfiles/clamd/Dockerfile b/data/Dockerfiles/clamd/Dockerfile index f5a3903b..f61f7c21 100644 --- a/data/Dockerfiles/clamd/Dockerfile +++ b/data/Dockerfiles/clamd/Dockerfile @@ -3,7 +3,7 @@ FROM debian:stretch-slim LABEL maintainer "André Peters " # Installation -ENV CLAMAV 0.101.1 +ENV CLAMAV 0.101.4 RUN apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ diff --git a/data/Dockerfiles/clamd/bootstrap.sh b/data/Dockerfiles/clamd/bootstrap.sh index 1d49cd20..4795ee13 100755 --- a/data/Dockerfiles/clamd/bootstrap.sh +++ b/data/Dockerfiles/clamd/bootstrap.sh @@ -48,6 +48,7 @@ while true; do sleep 2m SANE_MIRRORS="$(dig +ignore +short rsync.sanesecurity.net)" for sane_mirror in ${SANE_MIRRORS}; do + CE= rsync -avp --chown=clamav:clamav --chmod=Du=rwx,Dgo=rx,Fu=rw,Fog=r --timeout=5 rsync://${sane_mirror}/sanesecurity/ \ --include 'blurl.ndb' \ --include 'junk.ndb' \ @@ -61,7 +62,9 @@ while true; do --include 'sanesecurity.ftm' \ --include 'sigwhitelist.ign2' \ --exclude='*' /var/lib/clamav/ - if [ $? -eq 0 ]; then + CE=$? 
+ chmod 755 /var/lib/clamav/ + if [ ${CE} -eq 0 ]; then echo RELOAD | nc localhost 3310 break fi diff --git a/data/Dockerfiles/dockerapi/Dockerfile b/data/Dockerfiles/dockerapi/Dockerfile index 67bb6b07..37a2db5a 100644 --- a/data/Dockerfiles/dockerapi/Dockerfile +++ b/data/Dockerfiles/dockerapi/Dockerfile @@ -1,11 +1,12 @@ -FROM alpine:3.9 +FROM alpine:3.10 LABEL maintainer "Andre Peters " -RUN apk add -U --no-cache python2 python-dev py-pip gcc musl-dev tzdata openssl-dev libffi-dev \ - && pip2 install --upgrade pip \ - && pip2 install --upgrade docker==3.0.1 flask flask-restful pyOpenSSL \ - && apk del python-dev py2-pip gcc +WORKDIR /app -COPY server.py / +RUN apk add --update --no-cache python3 openssl tzdata \ + && pip3 install --upgrade pip \ + && pip3 install --upgrade docker flask flask-restful -CMD ["python2", "-u", "/server.py"] +COPY server.py /app/ + +CMD ["python3", "-u", "/app/server.py"] diff --git a/data/Dockerfiles/dockerapi/server.py b/data/Dockerfiles/dockerapi/server.py index d38775db..8d6a1c66 100644 --- a/data/Dockerfiles/dockerapi/server.py +++ b/data/Dockerfiles/dockerapi/server.py @@ -1,10 +1,11 @@ +#!/usr/bin/env python3 + from flask import Flask from flask_restful import Resource, Api from flask import jsonify from flask import Response from flask import request from threading import Thread -from OpenSSL import crypto import docker import uuid import signal @@ -14,6 +15,8 @@ import re import sys import ssl import socket +import subprocess +import traceback docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock', version='auto') app = Flask(__name__) @@ -43,261 +46,316 @@ class container_get(Resource): class container_post(Resource): def post(self, container_id, post_action): if container_id and container_id.isalnum() and post_action: - if post_action == 'stop': - try: - for container in docker_client.containers.list(all=True, filters={"id": container_id}): - container.stop() - return jsonify(type='success', msg='command completed successfully') - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif post_action == 'start': - try: - for container in docker_client.containers.list(all=True, filters={"id": container_id}): - container.start() - return jsonify(type='success', msg='command completed successfully') - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif post_action == 'restart': - try: - for container in docker_client.containers.list(all=True, filters={"id": container_id}): - container.restart() - return jsonify(type='success', msg='command completed successfully') - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif post_action == 'top': - try: - for container in docker_client.containers.list(all=True, filters={"id": container_id}): - return jsonify(type='success', msg=container.top()) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif post_action == 'stats': - try: - for container in docker_client.containers.list(all=True, filters={"id": container_id}): - return jsonify(type='success', msg=container.stats(decode=True, stream=False)) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif post_action == 'exec': - - if not request.json or not 'cmd' in request.json: - return jsonify(type='danger', msg='cmd is missing') - - if request.json['cmd'] == 'mailq': - if 'items' in request.json: - r = re.compile("^[0-9a-fA-F]+$") - filtered_qids = filter(r.match, request.json['items']) - if filtered_qids: - if 
request.json['task'] == 'delete': - flagged_qids = ['-d %s' % i for i in filtered_qids] - sanitized_string = str(' '.join(flagged_qids)); - try: - for container in docker_client.containers.list(filters={"id": container_id}): - postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string]) - return exec_run_handler('generic', postsuper_r) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - if request.json['task'] == 'hold': - flagged_qids = ['-h %s' % i for i in filtered_qids] - sanitized_string = str(' '.join(flagged_qids)); - try: - for container in docker_client.containers.list(filters={"id": container_id}): - postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string]) - return exec_run_handler('generic', postsuper_r) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - if request.json['task'] == 'unhold': - flagged_qids = ['-H %s' % i for i in filtered_qids] - sanitized_string = str(' '.join(flagged_qids)); - try: - for container in docker_client.containers.list(filters={"id": container_id}): - postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string]) - return exec_run_handler('generic', postsuper_r) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - if request.json['task'] == 'deliver': - flagged_qids = ['-i %s' % i for i in filtered_qids] - try: - for container in docker_client.containers.list(filters={"id": container_id}): - for i in flagged_qids: - postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix') - # todo: check each exit code - return jsonify(type='success', msg=str("Scheduled immediate delivery")) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - elif request.json['task'] == 'list': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix') - return exec_run_handler('utf8_text_only', mailq_return) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - elif request.json['task'] == 'flush': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix') - return exec_run_handler('generic', postqueue_r) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - elif request.json['task'] == 'super_delete': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"]) - return exec_run_handler('generic', postsuper_r) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif request.json['cmd'] == 'system': - if request.json['task'] == 'fts_rescan': - if 'username' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/local/bin/doveadm fts rescan -u '" + request.json['username'].replace("'", "'\\''") + "'"], user='vmail') - if rescan_return.exit_code == 0: - return jsonify(type='success', msg='fts_rescan: rescan triggered') - else: - return jsonify(type='warning', msg='fts_rescan error') - except Exception as e: - return jsonify(type='danger', msg=str(e)) - if 'all' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - rescan_return = 
container.exec_run(["/bin/bash", "-c", "/usr/local/bin/doveadm fts rescan -A"], user='vmail') - if rescan_return.exit_code == 0: - return jsonify(type='success', msg='fts_rescan: rescan triggered') - else: - return jsonify(type='warning', msg='fts_rescan error') - except Exception as e: - return jsonify(type='danger', msg=str(e)) - elif request.json['task'] == 'df': - if 'dir' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request.json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody') - if df_return.exit_code == 0: - return df_return.output.rstrip() - else: - return "0,0,0,0,0,0" - except Exception as e: - return jsonify(type='danger', msg=str(e)) - elif request.json['task'] == 'mysql_upgrade': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - sql_shell = container.exec_run(["/bin/bash"], stdin=True, socket=True, user='mysql') - upgrade_cmd = "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n" - sql_socket = sql_shell.output; - try : - sql_socket.sendall(upgrade_cmd.encode('utf-8')) - sql_socket.shutdown(socket.SHUT_WR) - except socket.error: - return jsonify(type='danger', msg=str('socket error')) - worker_response = recv_socket_data(sql_socket) - matched = False - for line in worker_response.split("\n"): - if 'is already upgraded to' in line: - matched = True - if matched: - return jsonify(type='success', msg='mysql_upgrade: already upgraded') - else: - container.restart() - return jsonify(type='warning', msg='mysql_upgrade: upgrade was applied') - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif request.json['cmd'] == 'reload': - if request.json['task'] == 'dovecot': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - reload_return = container.exec_run(["/bin/bash", "-c", "/usr/local/sbin/dovecot reload"]) - return exec_run_handler('generic', reload_return) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - if request.json['task'] == 'postfix': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"]) - return exec_run_handler('generic', reload_return) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - if request.json['task'] == 'nginx': - try: - for container in docker_client.containers.list(filters={"id": container_id}): - reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"]) - return exec_run_handler('generic', reload_return) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif request.json['cmd'] == 'sieve': - if request.json['task'] == 'list': - if 'username' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/local/bin/doveadm sieve list -u '" + request.json['username'].replace("'", "'\\''") + "'"]) - return exec_run_handler('utf8_text_only', sieve_return) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - elif request.json['task'] == 'print': - if 'username' in request.json and 'script_name' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - sieve_return = 
container.exec_run(["/bin/bash", "-c", "/usr/local/bin/doveadm sieve get -u '" + request.json['username'].replace("'", "'\\''") + "' '" + request.json['script_name'].replace("'", "'\\''") + "'"]) - return exec_run_handler('utf8_text_only', sieve_return) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif request.json['cmd'] == 'maildir': - if request.json['task'] == 'cleanup': - if 'maildir' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - sane_name = re.sub(r'\W+', '', request.json['maildir']) - maildir_cleanup = container.exec_run(["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request.json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request.json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"], user='vmail') - return exec_run_handler('generic', maildir_cleanup) - except Exception as e: - return jsonify(type='danger', msg=str(e)) - - elif request.json['cmd'] == 'rspamd': - if request.json['task'] == 'worker_password': - if 'raw' in request.json: - try: - for container in docker_client.containers.list(filters={"id": container_id}): - worker_shell = container.exec_run(["/bin/bash"], stdin=True, socket=True, user='_rspamd') - worker_cmd = "/usr/bin/rspamadm pw -e -p '" + request.json['raw'].replace("'", "'\\''") + "' 2> /dev/null\n" - worker_socket = worker_shell.output; - try : - worker_socket.sendall(worker_cmd.encode('utf-8')) - worker_socket.shutdown(socket.SHUT_WR) - except socket.error: - return jsonify(type='danger', msg=str('socket error')) - worker_response = recv_socket_data(worker_socket) - matched = False - for line in worker_response.split("\n"): - if '$2$' in line: - matched = True - hash = line.strip() - hash_out = re.search('\$2\$.+$', hash).group(0) - f = open("/access.inc", "w") - f.write('enable_password = "' + re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip()) + '";\n') - f.close() - container.restart() - if matched: - return jsonify(type='success', msg='command completed successfully') - else: - return jsonify(type='danger', msg='command did not complete') - except Exception as e: - return jsonify(type='danger', msg=str(e)) + try: + """Dispatch container_post api call""" + if post_action == 'exec': + if not request.json or not 'cmd' in request.json: + return jsonify(type='danger', msg='cmd is missing') + if not request.json or not 'task' in request.json: + return jsonify(type='danger', msg='task is missing') + api_call_method_name = '__'.join(['container_post', str(post_action), str(request.json['cmd']), str(request.json['task']) ]) else: - return jsonify(type='danger', msg='Unknown command') + api_call_method_name = '__'.join(['container_post', str(post_action) ]) - else: - return jsonify(type='danger', msg='invalid action') + api_call_method = getattr(self, api_call_method_name, lambda container_id: jsonify(type='danger', msg='container_post - unknown api call')) + + + print("api call: %s, container_id: %s" % (api_call_method_name, container_id)) + return api_call_method(container_id) + except Exception as e: + print("error - container_post: %s" % str(e)) + return jsonify(type='danger', msg=str(e)) else: - return jsonify(type='danger', msg='invalid container id or missing action') + return jsonify(type='danger', msg='invalid container id or missing action') + + + # api call: container_post - post_action: stop + def container_post__stop(self, container_id): + for container in 
docker_client.containers.list(all=True, filters={"id": container_id}): + container.stop() + return jsonify(type='success', msg='command completed successfully') + + + # api call: container_post - post_action: start + def container_post__start(self, container_id): + for container in docker_client.containers.list(all=True, filters={"id": container_id}): + container.start() + return jsonify(type='success', msg='command completed successfully') + + + # api call: container_post - post_action: restart + def container_post__restart(self, container_id): + for container in docker_client.containers.list(all=True, filters={"id": container_id}): + container.restart() + return jsonify(type='success', msg='command completed successfully') + + + # api call: container_post - post_action: top + def container_post__top(self, container_id): + for container in docker_client.containers.list(all=True, filters={"id": container_id}): + return jsonify(type='success', msg=container.top()) + + + # api call: container_post - post_action: stats + def container_post__stats(self, container_id): + for container in docker_client.containers.list(all=True, filters={"id": container_id}): + for stat in container.stats(decode=True, stream=True): + return jsonify(type='success', msg=stat ) + + + # api call: container_post - post_action: exec - cmd: mailq - task: delete + def container_post__exec__mailq__delete(self, container_id): + if 'items' in request.json: + r = re.compile("^[0-9a-fA-F]+$") + filtered_qids = filter(r.match, request.json['items']) + if filtered_qids: + flagged_qids = ['-d %s' % i for i in filtered_qids] + sanitized_string = str(' '.join(flagged_qids)); + + for container in docker_client.containers.list(filters={"id": container_id}): + postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string]) + return exec_run_handler('generic', postsuper_r) + + + # api call: container_post - post_action: exec - cmd: mailq - task: hold + def container_post__exec__mailq__hold(self, container_id): + if 'items' in request.json: + r = re.compile("^[0-9a-fA-F]+$") + filtered_qids = filter(r.match, request.json['items']) + if filtered_qids: + flagged_qids = ['-h %s' % i for i in filtered_qids] + sanitized_string = str(' '.join(flagged_qids)); + + for container in docker_client.containers.list(filters={"id": container_id}): + postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string]) + return exec_run_handler('generic', postsuper_r) + + + # api call: container_post - post_action: exec - cmd: mailq - task: unhold + def container_post__exec__mailq__unhold(self, container_id): + if 'items' in request.json: + r = re.compile("^[0-9a-fA-F]+$") + filtered_qids = filter(r.match, request.json['items']) + if filtered_qids: + flagged_qids = ['-H %s' % i for i in filtered_qids] + sanitized_string = str(' '.join(flagged_qids)); + + for container in docker_client.containers.list(filters={"id": container_id}): + postsuper_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postsuper " + sanitized_string]) + return exec_run_handler('generic', postsuper_r) + + + # api call: container_post - post_action: exec - cmd: mailq - task: deliver + def container_post__exec__mailq__deliver(self, container_id): + if 'items' in request.json: + r = re.compile("^[0-9a-fA-F]+$") + filtered_qids = filter(r.match, request.json['items']) + if filtered_qids: + flagged_qids = ['-i %s' % i for i in filtered_qids] + + for container in docker_client.containers.list(filters={"id": container_id}): 
+ for i in flagged_qids: + postqueue_r = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postqueue " + i], user='postfix') + # todo: check each exit code + return jsonify(type='success', msg=str("Scheduled immediate delivery")) + + + # api call: container_post - post_action: exec - cmd: mailq - task: list + def container_post__exec__mailq__list(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + mailq_return = container.exec_run(["/usr/sbin/postqueue", "-j"], user='postfix') + return exec_run_handler('utf8_text_only', mailq_return) + + + # api call: container_post - post_action: exec - cmd: mailq - task: flush + def container_post__exec__mailq__flush(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + postqueue_r = container.exec_run(["/usr/sbin/postqueue", "-f"], user='postfix') + return exec_run_handler('generic', postqueue_r) + + + # api call: container_post - post_action: exec - cmd: mailq - task: super_delete + def container_post__exec__mailq__super_delete(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + postsuper_r = container.exec_run(["/usr/sbin/postsuper", "-d", "ALL"]) + return exec_run_handler('generic', postsuper_r) + + + # api call: container_post - post_action: exec - cmd: system - task: fts_rescan + def container_post__exec__system__fts_rescan(self, container_id): + if 'username' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -u '" + request.json['username'].replace("'", "'\\''") + "'"], user='vmail') + if rescan_return.exit_code == 0: + return jsonify(type='success', msg='fts_rescan: rescan triggered') + else: + return jsonify(type='warning', msg='fts_rescan error') + + if 'all' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + rescan_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm fts rescan -A"], user='vmail') + if rescan_return.exit_code == 0: + return jsonify(type='success', msg='fts_rescan: rescan triggered') + else: + return jsonify(type='warning', msg='fts_rescan error') + + + # api call: container_post - post_action: exec - cmd: system - task: df + def container_post__exec__system__df(self, container_id): + if 'dir' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + df_return = container.exec_run(["/bin/bash", "-c", "/bin/df -H '" + request.json['dir'].replace("'", "'\\''") + "' | /usr/bin/tail -n1 | /usr/bin/tr -s [:blank:] | /usr/bin/tr ' ' ','"], user='nobody') + if df_return.exit_code == 0: + return df_return.output.decode('utf-8').rstrip() + else: + return "0,0,0,0,0,0" + + + # api call: container_post - post_action: exec - cmd: system - task: mysql_upgrade + def container_post__exec__system__mysql_upgrade(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + cmd = "/usr/bin/mysql_upgrade -uroot -p'" + os.environ['DBROOT'].replace("'", "'\\''") + "'\n" + cmd_response = exec_cmd_container(container, cmd, user='mysql') + + matched = False + for line in cmd_response.split("\n"): + if 'is already upgraded to' in line: + matched = True + if matched: + return jsonify(type='success', msg='mysql_upgrade: already upgraded') + else: + container.restart() + return jsonify(type='warning', msg='mysql_upgrade: upgrade was 
applied') + + + # api call: container_post - post_action: exec - cmd: reload - task: dovecot + def container_post__exec__reload__dovecot(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/dovecot reload"]) + return exec_run_handler('generic', reload_return) + + + # api call: container_post - post_action: exec - cmd: reload - task: postfix + def container_post__exec__reload__postfix(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + reload_return = container.exec_run(["/bin/bash", "-c", "/usr/sbin/postfix reload"]) + return exec_run_handler('generic', reload_return) + + + # api call: container_post - post_action: exec - cmd: reload - task: nginx + def container_post__exec__reload__nginx(self, container_id): + for container in docker_client.containers.list(filters={"id": container_id}): + reload_return = container.exec_run(["/bin/sh", "-c", "/usr/sbin/nginx -s reload"]) + return exec_run_handler('generic', reload_return) + + + # api call: container_post - post_action: exec - cmd: sieve - task: list + def container_post__exec__sieve__list(self, container_id): + if 'username' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + sieve_return = container.exec_run(["/bin/bash", "-c", "/usr/bin/doveadm sieve list -u '" + request.json['username'].replace("'", "'\\''") + "'"]) + return exec_run_handler('utf8_text_only', sieve_return) + + + # api call: container_post - post_action: exec - cmd: sieve - task: print + def container_post__exec__sieve__print(self, container_id): + if 'username' in request.json and 'script_name' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + cmd = ["/bin/bash", "-c", "/usr/bin/doveadm sieve get -u '" + request.json['username'].replace("'", "'\\''") + "' '" + request.json['script_name'].replace("'", "'\\''") + "'"] + sieve_return = container.exec_run(cmd) + return exec_run_handler('utf8_text_only', sieve_return) + + + # api call: container_post - post_action: exec - cmd: maildir - task: cleanup + def container_post__exec__maildir__cleanup(self, container_id): + if 'maildir' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + sane_name = re.sub(r'\W+', '', request.json['maildir']) + cmd = ["/bin/bash", "-c", "if [[ -d '/var/vmail/" + request.json['maildir'].replace("'", "'\\''") + "' ]]; then /bin/mv '/var/vmail/" + request.json['maildir'].replace("'", "'\\''") + "' '/var/vmail/_garbage/" + str(int(time.time())) + "_" + sane_name + "'; fi"] + maildir_cleanup = container.exec_run(cmd, user='vmail') + return exec_run_handler('generic', maildir_cleanup) + + + + # api call: container_post - post_action: exec - cmd: rspamd - task: worker_password + def container_post__exec__rspamd__worker_password(self, container_id): + if 'raw' in request.json: + for container in docker_client.containers.list(filters={"id": container_id}): + cmd = "/usr/bin/rspamadm pw -e -p '" + request.json['raw'].replace("'", "'\\''") + "' 2> /dev/null" + cmd_response = exec_cmd_container(container, cmd, user="_rspamd") + + matched = False + for line in cmd_response.split("\n"): + if '$2$' in line: + hash = line.strip() + hash_out = re.search('\$2\$.+$', hash).group(0) + rspamd_passphrase_hash = re.sub('[^0-9a-zA-Z\$]+', '', hash_out.rstrip()) + + rspamd_password_filename = 
"/etc/rspamd/override.d/worker-controller-password.inc" + cmd = '''/bin/echo 'enable_password = "%s";' > %s && cat %s''' % (rspamd_passphrase_hash, rspamd_password_filename, rspamd_password_filename) + cmd_response = exec_cmd_container(container, cmd, user="_rspamd") + + if rspamd_passphrase_hash.startswith("$2$") and rspamd_passphrase_hash in cmd_response: + container.restart() + matched = True + + if matched: + return jsonify(type='success', msg='command completed successfully') + else: + return jsonify(type='danger', msg='command did not complete') + + +def exec_cmd_container(container, cmd, user, timeout=2, shell_cmd="/bin/bash"): + + def recv_socket_data(c_socket, timeout): + c_socket.setblocking(0) + total_data=[]; + data=''; + begin=time.time() + while True: + if total_data and time.time()-begin > timeout: + break + elif time.time()-begin > timeout*2: + break + try: + data = c_socket.recv(8192) + if data: + total_data.append(data.decode('utf-8')) + #change the beginning time for measurement + begin=time.time() + else: + #sleep for sometime to indicate a gap + time.sleep(0.1) + break + except: + pass + return ''.join(total_data) + + try : + socket = container.exec_run([shell_cmd], stdin=True, socket=True, user=user).output._sock + if not cmd.endswith("\n"): + cmd = cmd + "\n" + socket.send(cmd.encode('utf-8')) + data = recv_socket_data(socket, timeout) + socket.close() + return data + + except Exception as e: + print("error - exec_cmd_container: %s" % str(e)) + traceback.print_exc(file=sys.stdout) + +def exec_run_handler(type, output): + if type == 'generic': + if output.exit_code == 0: + return jsonify(type='success', msg='command completed successfully') + else: + return jsonify(type='danger', msg='command failed: ' + output.output.decode('utf-8')) + if type == 'utf8_text_only': + r = Response(response=output.output.decode('utf-8'), status=200, mimetype="text/plain") + r.headers["Content-Type"] = "text/plain; charset=utf-8" + return r class GracefulKiller: kill_now = False @@ -308,84 +366,26 @@ class GracefulKiller: def exit_gracefully(self, signum, frame): self.kill_now = True +def create_self_signed_cert(): + process = subprocess.Popen( + "openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes -keyout /app/dockerapi_key.pem -out /app/dockerapi_cert.pem -subj /CN=dockerapi/O=mailcow -addext subjectAltName=DNS:dockerapi".split(), + stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell=False + ) + process.wait() + def startFlaskAPI(): create_self_signed_cert() try: ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ctx.check_hostname = False - ctx.load_cert_chain(certfile='/cert.pem', keyfile='/key.pem') + ctx.load_cert_chain(certfile='/app/dockerapi_cert.pem', keyfile='/app/dockerapi_key.pem') except: - print "Cannot initialize TLS, retrying in 5s..." 
+ print ("Cannot initialize TLS, retrying in 5s...") time.sleep(5) app.run(debug=False, host='0.0.0.0', port=443, threaded=True, ssl_context=ctx) -def recv_socket_data(c_socket, timeout=10): - c_socket.setblocking(0) - total_data=[]; - data=''; - begin=time.time() - while True: - if total_data and time.time()-begin > timeout: - break - elif time.time()-begin > timeout*2: - break - try: - data = c_socket.recv(8192) - if data: - total_data.append(data) - #change the beginning time for measurement - begin=time.time() - else: - #sleep for sometime to indicate a gap - time.sleep(0.1) - break - except: - pass - return ''.join(total_data) - -def exec_run_handler(type, output): - if type == 'generic': - if output.exit_code == 0: - return jsonify(type='success', msg='command completed successfully') - else: - return jsonify(type='danger', msg='command failed: ' + output.output) - if type == 'utf8_text_only': - r = Response(response=output.output, status=200, mimetype="text/plain") - r.headers["Content-Type"] = "text/plain; charset=utf-8" - return r - -def create_self_signed_cert(): - success = False - while not success: - try: - pkey = crypto.PKey() - pkey.generate_key(crypto.TYPE_RSA, 2048) - cert = crypto.X509() - cert.get_subject().O = "mailcow" - cert.get_subject().CN = "dockerapi" - cert.set_serial_number(int(uuid.uuid4())) - cert.gmtime_adj_notBefore(0) - cert.gmtime_adj_notAfter(10*365*24*60*60) - cert.set_issuer(cert.get_subject()) - cert.set_pubkey(pkey) - cert.sign(pkey, 'sha512') - cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert) - pkey = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey) - with os.fdopen(os.open('/cert.pem', os.O_WRONLY | os.O_CREAT, 0o644), 'w') as handle: - handle.write(cert) - with os.fdopen(os.open('/key.pem', os.O_WRONLY | os.O_CREAT, 0o600), 'w') as handle: - handle.write(pkey) - success = True - except: - time.sleep(1) - try: - os.remove('/cert.pem') - os.remove('/key.pem') - except OSError: - pass - api.add_resource(containers_get, '/containers/json') -api.add_resource(container_get, '/containers//json') +api.add_resource(container_get, '/containers//json') api.add_resource(container_post, '/containers//') if __name__ == '__main__': @@ -397,5 +397,4 @@ if __name__ == '__main__': time.sleep(1) if killer.kill_now: break - print "Stopping dockerapi-mailcow" - + print ("Stopping dockerapi-mailcow") diff --git a/data/Dockerfiles/dovecot/Dockerfile b/data/Dockerfiles/dovecot/Dockerfile index 1320df1f..8bc60d9f 100644 --- a/data/Dockerfiles/dovecot/Dockerfile +++ b/data/Dockerfiles/dovecot/Dockerfile @@ -3,117 +3,112 @@ LABEL maintainer "Andre Peters " ARG DEBIAN_FRONTEND=noninteractive ENV LC_ALL C -ENV DOVECOT_VERSION 2.3.4 -ENV PIGEONHOLE_VERSION 0.5.4 -RUN apt-get update && apt-get -y --no-install-recommends install \ - automake \ - autotools-dev \ - build-essential \ - ca-certificates \ - cpanminus \ - curl \ - default-libmysqlclient-dev \ - dnsutils \ - gettext \ - jq \ - libjson-webtoken-perl \ - libcgi-pm-perl \ - libcrypt-openssl-rsa-perl \ - libdata-uniqid-perl \ - libhtml-parser-perl \ - libmail-imapclient-perl \ - libparse-recdescent-perl \ - libsys-meminfo-perl \ - libtest-mockobject-perl \ - libwww-perl \ - libauthen-ntlm-perl \ - libbz2-dev \ - libcrypt-ssleay-perl \ - libcurl4-openssl-dev \ - libdbd-mysql-perl \ - libdbi-perl \ - libdigest-hmac-perl \ - libexpat1-dev \ - libfile-copy-recursive-perl \ - libio-compress-perl \ - libio-socket-inet6-perl \ - libio-socket-ssl-perl \ - libio-tee-perl \ - libipc-run-perl \ - libldap2-dev \ - 
liblockfile-simple-perl \ - liblz-dev \ - liblz4-dev \ - liblzma-dev \ - libmodule-scandeps-perl \ - libnet-ssleay-perl \ - libpam-dev \ - libpar-packer-perl \ - libreadonly-perl \ - libssl-dev \ - libterm-readkey-perl \ - libtest-pod-perl \ - libtest-simple-perl \ - libtry-tiny-perl \ - libunicode-string-perl \ - libproc-processtable-perl \ - libtest-nowarnings-perl \ - libtest-deep-perl \ - libtest-warn-perl \ - libregexp-common-perl \ - liburi-perl \ - lzma-dev \ - python-html2text \ - python-jinja2 \ - python-mysql.connector \ - python-redis \ - make \ - mysql-client \ - procps \ - supervisor \ - cron \ - redis-server \ - syslog-ng \ - syslog-ng-core \ - syslog-ng-mod-redis \ - && rm -rf /var/lib/apt/lists/* \ - && curl https://www.dovecot.org/releases/2.3/dovecot-$DOVECOT_VERSION.tar.gz | tar xvz \ - && cd dovecot-$DOVECOT_VERSION \ - && ./configure --with-solr --with-mysql --with-ldap --with-lzma --with-lz4 --with-ssl=openssl --with-notify=inotify --with-storages=mdbox,sdbox,maildir,mbox,imapc,pop3c --with-bzlib --with-zlib --enable-hardening \ - && make -j3 \ - && make install \ - && make clean \ - && cd .. && rm -rf dovecot-$DOVECOT_VERSION \ - && curl https://pigeonhole.dovecot.org/releases/2.3/dovecot-2.3-pigeonhole-$PIGEONHOLE_VERSION.tar.gz | tar xvz \ - && cd dovecot-2.3-pigeonhole-$PIGEONHOLE_VERSION \ - && ./configure \ - && make -j3 \ - && make install \ - && make clean \ - && cd .. \ - && rm -rf dovecot-2.3-pigeonhole-$PIGEONHOLE_VERSION \ - && cpanm Data::Uniqid Mail::IMAPClient String::Util \ - && groupadd -g 5000 vmail \ +# Add groups and users before installing Dovecot to not break compatibility +RUN groupadd -g 5000 vmail \ && groupadd -g 401 dovecot \ && groupadd -g 402 dovenull \ && useradd -g vmail -u 5000 vmail -d /var/vmail \ && useradd -c "Dovecot unprivileged user" -d /dev/null -u 401 -g dovecot -s /bin/false dovecot \ && useradd -c "Dovecot login user" -d /dev/null -u 402 -g dovenull -s /bin/false dovenull \ && touch /etc/default/locale \ - && apt-get purge -y build-essential automake autotools-dev default-libmysqlclient-dev libbz2-dev libcurl4-openssl-dev libexpat1-dev liblz-dev liblz4-dev liblzma-dev libpam-dev libssl-dev lzma-dev \ + && apt-get update \ + && apt-get -y --no-install-recommends install \ + apt-transport-https \ + ca-certificates \ + cpanminus \ + cron \ + curl \ + dnsutils \ + dirmngr \ + gettext \ + gnupg2 \ + jq \ + libauthen-ntlm-perl \ + libcgi-pm-perl \ + libcrypt-openssl-rsa-perl \ + libcrypt-ssleay-perl \ + libdata-uniqid-perl \ + libdbd-mysql-perl \ + libdbi-perl \ + libdigest-hmac-perl \ + libdist-checkconflicts-perl \ + libfile-copy-recursive-perl \ + libfile-tail-perl \ + libhtml-parser-perl \ + libio-compress-perl \ + libio-socket-inet6-perl \ + libio-socket-ssl-perl \ + libio-tee-perl \ + libipc-run-perl \ + libjson-webtoken-perl \ + liblockfile-simple-perl \ + libmail-imapclient-perl \ + libmodule-implementation-perl \ + libmodule-scandeps-perl \ + libnet-ssleay-perl \ + libpackage-stash-perl \ + libpackage-stash-xs-perl \ + libpar-packer-perl \ + libparse-recdescent-perl \ + libproc-processtable-perl \ + libreadonly-perl \ + libregexp-common-perl \ + libsys-meminfo-perl \ + libterm-readkey-perl \ + libtest-deep-perl \ + libtest-fatal-perl \ + libtest-mock-guard-perl \ + libtest-mockobject-perl \ + libtest-nowarnings-perl \ + libtest-pod-perl \ + libtest-requires-perl \ + libtest-simple-perl \ + libtest-warn-perl \ + libtry-tiny-perl \ + libunicode-string-perl \ + liburi-perl \ + libwww-perl \ + mysql-client \ + procps \ + 
python-html2text \ + python-jinja2 \ + python-mysql.connector \ + python-redis \ + redis-server \ + supervisor \ + syslog-ng \ + syslog-ng-core \ + syslog-ng-mod-redis \ + && apt-key adv --fetch-keys https://repo.dovecot.org/DOVECOT-REPO-GPG \ + && echo 'deb https://repo.dovecot.org/ce-2.3-latest/debian/stretch stretch main' > /etc/apt/sources.list.d/dovecot.list \ + && apt-get update \ + && apt-get -y --no-install-recommends install \ + dovecot-lua \ + dovecot-managesieved \ + dovecot-sieve \ + dovecot-lmtpd \ + dovecot-ldap \ + dovecot-mysql \ + dovecot-core \ + dovecot-pop3d \ + dovecot-imapd \ + dovecot-solr \ && apt-get autoremove --purge -y \ - && rm -rf /tmp/* /var/tmp/* + && apt-get autoclean \ + && rm -rf /var/lib/apt/lists/* \ + && rm -rf /tmp/* /var/tmp/* /etc/cron.daily/* COPY trim_logs.sh /usr/local/bin/trim_logs.sh +COPY clean_q_aged.sh /usr/local/bin/clean_q_aged.sh COPY syslog-ng.conf /etc/syslog-ng/syslog-ng.conf COPY imapsync /usr/local/bin/imapsync COPY postlogin.sh /usr/local/bin/postlogin.sh COPY imapsync_cron.pl /usr/local/bin/imapsync_cron.pl -COPY report-spam.sieve /usr/local/lib/dovecot/sieve/report-spam.sieve -COPY report-ham.sieve /usr/local/lib/dovecot/sieve/report-ham.sieve -COPY rspamd-pipe-ham /usr/local/lib/dovecot/sieve/rspamd-pipe-ham -COPY rspamd-pipe-spam /usr/local/lib/dovecot/sieve/rspamd-pipe-spam +COPY report-spam.sieve /usr/lib/dovecot/sieve/report-spam.sieve +COPY report-ham.sieve /usr/lib/dovecot/sieve/report-ham.sieve +COPY rspamd-pipe-ham /usr/lib/dovecot/sieve/rspamd-pipe-ham +COPY rspamd-pipe-spam /usr/lib/dovecot/sieve/rspamd-pipe-spam COPY sa-rules.sh /usr/local/bin/sa-rules.sh COPY maildir_gc.sh /usr/local/bin/maildir_gc.sh COPY docker-entrypoint.sh / diff --git a/data/Dockerfiles/dovecot/clean_q_aged.sh b/data/Dockerfiles/dovecot/clean_q_aged.sh new file mode 100755 index 00000000..37ed1ff2 --- /dev/null +++ b/data/Dockerfiles/dovecot/clean_q_aged.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +MAX_AGE=$(redis-cli --raw -h redis-mailcow GET Q_MAX_AGE) + +if [[ -z ${MAX_AGE} ]]; then + echo "Max age for quarantine items not defined" + exit 1 +fi + +NUM_REGEXP='^[0-9]+$' +if ! [[ ${MAX_AGE} =~ ${NUM_REGEXP} ]] ; then + echo "Max age for quarantine items invalid" + exit 1 +fi + +TO_DELETE=$(mysql --socket=/var/run/mysqld/mysqld.sock -u __DBUSER__ -p__DBPASS__ __DBNAME__ -e "SELECT COUNT(id) FROM quarantine WHERE created < NOW() - INTERVAL ${MAX_AGE//[!0-9]/} DAY" -BN) +mysql --socket=/var/run/mysqld/mysqld.sock -u __DBUSER__ -p__DBPASS__ __DBNAME__ -e "DELETE FROM quarantine WHERE created < NOW() - INTERVAL ${MAX_AGE//[!0-9]/} DAY" +echo "Deleted ${TO_DELETE} items from quarantine table (max age is ${MAX_AGE//[!0-9]/} days)" diff --git a/data/Dockerfiles/dovecot/docker-entrypoint.sh b/data/Dockerfiles/dovecot/docker-entrypoint.sh index 0589579d..f016b02a 100755 --- a/data/Dockerfiles/dovecot/docker-entrypoint.sh +++ b/data/Dockerfiles/dovecot/docker-entrypoint.sh @@ -16,10 +16,14 @@ sed -i "s/__DBUSER__/${DBUSER}/g" /usr/local/bin/quarantine_notify.py sed -i "s/__DBPASS__/${DBPASS}/g" /usr/local/bin/quarantine_notify.py sed -i "s/__DBNAME__/${DBNAME}/g" /usr/local/bin/quarantine_notify.py +sed -i "s/__DBUSER__/${DBUSER}/g" /usr/local/bin/clean_q_aged.sh +sed -i "s/__DBPASS__/${DBPASS}/g" /usr/local/bin/clean_q_aged.sh +sed -i "s/__DBNAME__/${DBNAME}/g" /usr/local/bin/clean_q_aged.sh + sed -i "s/__LOG_LINES__/${LOG_LINES}/g" /usr/local/bin/trim_logs.sh # Create missing directories -[[ ! 
-d /usr/local/etc/dovecot/sql/ ]] && mkdir -p /usr/local/etc/dovecot/sql/ +[[ ! -d /etc/dovecot/sql/ ]] && mkdir -p /etc/dovecot/sql/ [[ ! -d /var/vmail/_garbage ]] && mkdir -p /var/vmail/_garbage [[ ! -d /var/vmail/sieve ]] && mkdir -p /var/vmail/sieve [[ ! -d /etc/sogo ]] && mkdir -p /etc/sogo @@ -29,7 +33,8 @@ sed -i "s/__LOG_LINES__/${LOG_LINES}/g" /usr/local/bin/trim_logs.sh DBPASS=$(echo ${DBPASS} | sed 's/"/\\"/g') # Create quota dict for Dovecot -cat <<EOF > /usr/local/etc/dovecot/sql/dovecot-dict-sql-quota.conf +cat <<EOF > /etc/dovecot/sql/dovecot-dict-sql-quota.conf +# Autogenerated by mailcow connect = "host=/var/run/mysqld/mysqld.sock dbname=${DBNAME} user=${DBUSER} password=${DBPASS}" map { pattern = priv/quota/storage @@ -46,7 +51,8 @@ map { EOF # Create dict used for sieve pre and postfilters -cat <<EOF > /usr/local/etc/dovecot/sql/dovecot-dict-sql-sieve_before.conf +cat <<EOF > /etc/dovecot/sql/dovecot-dict-sql-sieve_before.conf +# Autogenerated by mailcow connect = "host=/var/run/mysqld/mysqld.sock dbname=${DBNAME} user=${DBUSER} password=${DBPASS}" map { pattern = priv/sieve/name/\$script_name @@ -68,7 +74,8 @@ map { } EOF -cat <<EOF > /usr/local/etc/dovecot/sql/dovecot-dict-sql-sieve_after.conf +cat <<EOF > /etc/dovecot/sql/dovecot-dict-sql-sieve_after.conf +# Autogenerated by mailcow connect = "host=/var/run/mysqld/mysqld.sock dbname=${DBNAME} user=${DBUSER} password=${DBPASS}" map { pattern = priv/sieve/name/\$script_name @@ -90,36 +97,41 @@ map { } EOF -echo -n ${ACL_ANYONE} > /usr/local/etc/dovecot/acl_anyone +echo -n ${ACL_ANYONE} > /etc/dovecot/acl_anyone if [[ "${SKIP_SOLR}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then -echo -n 'quota acl zlib listescape mail_crypt mail_crypt_acl mail_log notify' > /usr/local/etc/dovecot/mail_plugins -echo -n 'quota imap_quota imap_acl acl zlib imap_zlib imap_sieve listescape mail_crypt mail_crypt_acl notify mail_log' > /usr/local/etc/dovecot/mail_plugins_imap -echo -n 'quota sieve acl zlib listescape mail_crypt mail_crypt_acl' > /usr/local/etc/dovecot/mail_plugins_lmtp +echo -n 'quota acl zlib listescape mail_crypt mail_crypt_acl mail_log notify' > /etc/dovecot/mail_plugins +echo -n 'quota imap_quota imap_acl acl zlib imap_zlib imap_sieve listescape mail_crypt mail_crypt_acl notify mail_log' > /etc/dovecot/mail_plugins_imap +echo -n 'quota sieve acl zlib listescape mail_crypt mail_crypt_acl' > /etc/dovecot/mail_plugins_lmtp else -echo -n 'quota acl zlib listescape mail_crypt mail_crypt_acl mail_log notify fts fts_solr' > /usr/local/etc/dovecot/mail_plugins -echo -n 'quota imap_quota imap_acl acl zlib imap_zlib imap_sieve listescape mail_crypt mail_crypt_acl notify mail_log fts fts_solr' > /usr/local/etc/dovecot/mail_plugins_imap -echo -n 'quota sieve acl zlib listescape mail_crypt mail_crypt_acl fts fts_solr' > /usr/local/etc/dovecot/mail_plugins_lmtp +echo -n 'quota acl zlib listescape mail_crypt mail_crypt_acl mail_log notify fts fts_solr' > /etc/dovecot/mail_plugins +echo -n 'quota imap_quota imap_acl acl zlib imap_zlib imap_sieve listescape mail_crypt mail_crypt_acl notify mail_log fts fts_solr' > /etc/dovecot/mail_plugins_imap +echo -n 'quota sieve acl zlib listescape mail_crypt mail_crypt_acl fts fts_solr' > /etc/dovecot/mail_plugins_lmtp fi -chmod 644 /usr/local/etc/dovecot/mail_plugins /usr/local/etc/dovecot/mail_plugins_imap /usr/local/etc/dovecot/mail_plugins_lmtp /templates/quarantine.tpl +chmod 644 /etc/dovecot/mail_plugins /etc/dovecot/mail_plugins_imap /etc/dovecot/mail_plugins_lmtp /templates/quarantine.tpl -cat <<EOF >
+cat <<EOF > /etc/dovecot/sql/dovecot-dict-sql-userdb.conf
+# Autogenerated by mailcow
driver = mysql
connect = "host=/var/run/mysqld/mysqld.sock dbname=${DBNAME} user=${DBUSER} password=${DBPASS}"
-user_query = SELECT CONCAT(JSON_UNQUOTE(JSON_EXTRACT(attributes, '$.mailbox_format')), mailbox_path_prefix, '%d/%n/:VOLATILEDIR=/var/volatile/%u') AS mail, 5000 AS uid, 5000 AS gid, concat('*:bytes=', quota) AS quota_rule FROM mailbox WHERE username = '%u' AND active = '1'
+user_query = SELECT CONCAT(JSON_UNQUOTE(JSON_EXTRACT(attributes, '$.mailbox_format')), mailbox_path_prefix, '%d/%n/${MAILDIR_SUB}:VOLATILEDIR=/var/volatile/%u') AS mail, 5000 AS uid, 5000 AS gid, concat('*:bytes=', quota) AS quota_rule FROM mailbox WHERE username = '%u' AND active = '1'
iterate_query = SELECT username FROM mailbox WHERE active='1';
EOF
# Create pass dict for Dovecot
-cat <<EOF > /usr/local/etc/dovecot/sql/dovecot-dict-sql-passdb.conf
+cat <<EOF > /etc/dovecot/sql/dovecot-dict-sql-passdb.conf
+# Autogenerated by mailcow
driver = mysql
connect = "host=/var/run/mysqld/mysqld.sock dbname=${DBNAME} user=${DBUSER} password=${DBPASS}"
default_pass_scheme = SSHA256
password_query = SELECT password FROM mailbox WHERE active = '1' AND username = '%u' AND domain IN (SELECT domain FROM domain WHERE domain='%d' AND active='1') AND JSON_EXTRACT(attributes, '$.force_pw_update') NOT LIKE '%%1%%'
EOF
-# Create global sieve_after script
-cat /usr/local/etc/dovecot/sieve_after > /var/vmail/sieve/global.sieve
+# Migrate old sieve_after file
+[[ -f /etc/dovecot/sieve_after ]] && mv /etc/dovecot/sieve_after /etc/dovecot/global_sieve_after
+# Create global sieve scripts
+cat /etc/dovecot/global_sieve_after > /var/vmail/sieve/global_sieve_after.sieve
+cat /etc/dovecot/global_sieve_before > /var/vmail/sieve/global_sieve_before.sieve
# Check permissions of vmail/attachments directory.
# Do not do this every start-up, it may take a very long time. So we use a stat check here.
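A note on the new clean_q_aged.sh helper introduced above: it only acts when the Q_MAX_AGE key in Redis holds a positive number of days; otherwise it exits without touching the quarantine table. A minimal, hedged sketch of exercising it by hand (the dovecot-mailcow container lookup mirrors mailcow's usual compose naming and is an illustration, not part of this patch):

    # Illustrative only: set a 30-day quarantine retention, then run the cleaner once
    docker exec -it $(docker ps -qf name=dovecot-mailcow) redis-cli -h redis-mailcow SET Q_MAX_AGE 30
    docker exec -it $(docker ps -qf name=dovecot-mailcow) /usr/local/bin/clean_q_aged.sh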
@@ -127,14 +139,51 @@ if [[ $(stat -c %U /var/vmail/) != "vmail" ]] ; then chown -R vmail:vmail /var/v
if [[ $(stat -c %U /var/vmail/_garbage) != "vmail" ]] ; then chown -R vmail:vmail /var/vmail/_garbage ; fi
if [[ $(stat -c %U /var/attachments) != "vmail" ]] ; then chown -R vmail:vmail /var/attachments ; fi
+# Cleanup random user maildirs
+rm -rf /var/vmail/mailcow.local/*
+
# Create random master for SOGo sieve features
RAND_USER=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 16 | head -n 1)
RAND_PASS=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 24 | head -n 1)
-echo ${RAND_USER}@mailcow.local:{SHA1}$(echo -n ${RAND_PASS} | sha1sum | awk '{print $1}') > /usr/local/etc/dovecot/dovecot-master.passwd
-echo ${RAND_USER}@mailcow.local::5000:5000:::: > /usr/local/etc/dovecot/dovecot-master.userdb
+echo ${RAND_USER}@mailcow.local:{SHA1}$(echo -n ${RAND_PASS} | sha1sum | awk '{print $1}') > /etc/dovecot/dovecot-master.passwd
+echo ${RAND_USER}@mailcow.local::5000:5000:::: > /etc/dovecot/dovecot-master.userdb
echo ${RAND_USER}@mailcow.local:${RAND_PASS} > /etc/sogo/sieve.creds
+if [[ -z ${MAILDIR_SUB} ]]; then
+  MAILDIR_SUB_SHARED=
+else
+  MAILDIR_SUB_SHARED=/${MAILDIR_SUB}
+fi
+cat <<EOF > /etc/dovecot/shared_namespace.conf
+# Autogenerated by mailcow
+namespace {
+  type = shared
+  separator = /
+  prefix = Shared/%%u/
+  location = maildir:%%h${MAILDIR_SUB_SHARED}:INDEX=~${MAILDIR_SUB_SHARED}/Shared/%%u;CONTROL=~${MAILDIR_SUB_SHARED}/Shared/%%u
+  subscriptions = no
+  list = children
+}
+EOF
+
+if [[ "${ALLOW_ADMIN_EMAIL_LOGIN}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
+  # Create random master Password for SOGo 'login as user' via proxy auth
+  RAND_PASS=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 32 | head -n 1)
+  echo -n ${RAND_PASS} > /etc/phpfpm/sogo-sso.pass
+  cat <<EOF > /etc/dovecot/sogo-sso.conf
+# Autogenerated by mailcow
+passdb {
+  driver = static
+  args = allow_real_nets=${IPV4_NETWORK}.248/32 password={plain}${RAND_PASS}
+}
+EOF
+else
+  rm -f /etc/dovecot/sogo-sso.pass
+  rm -f /etc/dovecot/sogo-sso.conf
+fi
+
# 401 is user dovecot
if [[ ! -s /mail_crypt/ecprivkey.pem || !
-s /mail_crypt/ecpubkey.pem ]]; then openssl ecparam -name prime256v1 -genkey | openssl pkey -out /mail_crypt/ecprivkey.pem @@ -145,43 +194,46 @@ else fi # Compile sieve scripts -sievec /var/vmail/sieve/global.sieve -sievec /usr/local/lib/dovecot/sieve/report-spam.sieve -sievec /usr/local/lib/dovecot/sieve/report-ham.sieve +sievec /var/vmail/sieve/global_sieve_before.sieve +sievec /var/vmail/sieve/global_sieve_after.sieve +sievec /usr/lib/dovecot/sieve/report-spam.sieve +sievec /usr/lib/dovecot/sieve/report-ham.sieve # Fix permissions -chown root:root /usr/local/etc/dovecot/sql/*.conf -chown root:dovecot /usr/local/etc/dovecot/sql/dovecot-dict-sql-sieve* /usr/local/etc/dovecot/sql/dovecot-dict-sql-quota* -chmod 640 /usr/local/etc/dovecot/sql/*.conf +chown root:root /etc/dovecot/sql/*.conf +chown root:dovecot /etc/dovecot/sql/dovecot-dict-sql-sieve* /etc/dovecot/sql/dovecot-dict-sql-quota* +chmod 640 /etc/dovecot/sql/*.conf chown -R vmail:vmail /var/vmail/sieve chown -R vmail:vmail /var/volatile adduser vmail tty chmod g+rw /dev/console -chmod +x /usr/local/lib/dovecot/sieve/rspamd-pipe-ham \ - /usr/local/lib/dovecot/sieve/rspamd-pipe-spam \ +chown root:tty /dev/console +chmod +x /usr/lib/dovecot/sieve/rspamd-pipe-ham \ + /usr/lib/dovecot/sieve/rspamd-pipe-spam \ /usr/local/bin/imapsync_cron.pl \ /usr/local/bin/postlogin.sh \ /usr/local/bin/imapsync \ /usr/local/bin/trim_logs.sh \ /usr/local/bin/sa-rules.sh \ + /usr/local/bin/clean_q_aged.sh \ /usr/local/bin/maildir_gc.sh \ /usr/local/sbin/stop-supervisor.sh \ /usr/local/bin/quota_notify.py # Setup cronjobs echo '* * * * * root /usr/local/bin/imapsync_cron.pl 2>&1 | /usr/bin/logger' > /etc/cron.d/imapsync -echo '30 3 * * * vmail /usr/local/bin/doveadm quota recalc -A' > /etc/cron.d/dovecot-sync +#echo '30 3 * * * vmail /usr/local/bin/doveadm quota recalc -A' > /etc/cron.d/dovecot-sync echo '* * * * * vmail /usr/local/bin/trim_logs.sh >> /dev/console 2>&1' > /etc/cron.d/trim_logs echo '25 * * * * vmail /usr/local/bin/maildir_gc.sh >> /dev/console 2>&1' > /etc/cron.d/maildir_gc echo '30 1 * * * root /usr/local/bin/sa-rules.sh >> /dev/console 2>&1' > /etc/cron.d/sa-rules -echo '0 2 * * * root /usr/bin/curl http://solr:8983/solr/dovecot/update?optimize=true >> /dev/console 2>&1' > /etc/cron.d/solr-optimize +echo '0 2 * * * root /usr/bin/curl http://solr:8983/solr/dovecot-fts/update?optimize=true >> /dev/console 2>&1' > /etc/cron.d/solr-optimize echo '*/20 * * * * vmail /usr/local/bin/quarantine_notify.py >> /dev/console 2>&1' > /etc/cron.d/quarantine_notify - +echo '15 4 * * * vmail /usr/local/bin/clean_q_aged.sh >> /dev/console 2>&1' > /etc/cron.d/clean_q_aged # Fix more than 1 hardlink issue touch /etc/crontab /etc/cron.*/* # Clean old PID if any -[[ -f /usr/local/var/run/dovecot/master.pid ]] && rm /usr/local/var/run/dovecot/master.pid +[[ -f /var/run/dovecot/master.pid ]] && rm /var/run/dovecot/master.pid # Clean stopped imapsync jobs rm -f /tmp/imapsync_busy.lock @@ -191,6 +243,20 @@ IMAPSYNC_TABLE=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBP # Envsubst maildir_gc echo "$(envsubst < /usr/local/bin/maildir_gc.sh)" > /usr/local/bin/maildir_gc.sh +PUBKEY_MCRYPT=$(doveconf -P | grep -i mail_crypt_global_public_key | cut -d '<' -f2) +if [ -f ${PUBKEY_MCRYPT} ]; then + GUID=$(cat <(echo ${MAILCOW_HOSTNAME}) /mail_crypt/ecpubkey.pem | sha256sum | cut -d ' ' -f1 | tr -cd "[a-fA-F0-9.:/] ") + if [ ${#GUID} -eq 64 ]; then + mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} << EOF +REPLACE INTO 
versions (application, version) VALUES ("GUID", "${GUID}"); +EOF + else + mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} << EOF +REPLACE INTO versions (application, version) VALUES ("GUID", "INVALID"); +EOF + fi +fi + # Collect SA rules once now /usr/local/bin/sa-rules.sh diff --git a/data/Dockerfiles/dovecot/imapsync b/data/Dockerfiles/dovecot/imapsync index cfbd1ee8..a75795b0 100755 --- a/data/Dockerfiles/dovecot/imapsync +++ b/data/Dockerfiles/dovecot/imapsync @@ -1,6 +1,6 @@ -#!/usr/bin/perl +#!/usr/bin/env perl -# $Id: imapsync,v 1.882 2018/05/05 21:10:43 gilles Exp gilles $ +# $Id: imapsync,v 1.937 2019/05/01 22:14:00 gilles Exp gilles $ # structure # pod documentation # use pragmas @@ -10,7 +10,7 @@ # default values # folder loop # subroutines -# sub usage { +# sub usage # pod documentation @@ -19,13 +19,13 @@ =head1 NAME -imapsync - Email IMAP tool for syncing, copying and migrating -email mailboxes between two imap servers, one way, +imapsync - Email IMAP tool for syncing, copying and migrating +email mailboxes between two imap servers, one way, and without duplicates. =head1 VERSION -This documentation refers to Imapsync $Revision: 1.882 $ +This documentation refers to Imapsync $Revision: 1.937 $ =head1 USAGE @@ -47,23 +47,23 @@ one another. Imapsync command is a tool allowing incremental and recursive imap transfers from one mailbox to another. -By default all folders are transferred, recursively, meaning -the whole folder hierarchy is taken, all messages in them, -and all messages flags (\Seen \Answered \Flagged etc.) +By default all folders are transferred, recursively, meaning +the whole folder hierarchy is taken, all messages in them, +and all messages flags (\Seen \Answered \Flagged etc.) are synced too. -Imapsync reduces the amount of data transferred by not transferring -a given message if it resides already on both sides. +Imapsync reduces the amount of data transferred by not transferring +a given message if it resides already on both sides. -Same specific headers and the transfer is done only once. -By default, the identification headers are -"Message-Id:" and "Received:" lines +Same specific headers and the transfer is done only once. +By default, the identification headers are +"Message-Id:" and "Received:" lines but this choice can be changed with the --useheader option. -All flags are preserved, unread messages will stay unread, +All flags are preserved, unread messages will stay unread, read ones will stay read, deleted will stay deleted. -You can stop the transfer at any time and restart it later, +You can stop the transfer at any time and restart it later, imapsync works well with bad connections and interruptions, by design. @@ -75,7 +75,7 @@ In that case, use the --delete1 option. Option --delete1 implies also option --expunge1 so all messages marked deleted on host1 will be really deleted. -You can also decide to remove empty folders once all of their +You can also decide to remove empty folders once all of their messages have been transferred. Add --delete1emptyfolders to obtain this behavior. @@ -98,7 +98,7 @@ Michael R. Elkins) for a 2 ways synchronization. usage: imapsync [options] Mandatory options are the six values, three on each sides, -needed to log in into the IMAP servers, ie, +needed to log in into the IMAP servers, ie, a host, a username, and a password, two times. 
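As a quick orientation for the options documented below, the six mandatory values translate into a command line of this shape (hosts, users and passwords here are placeholders, not values taken from this patch):

    imapsync --host1 imap.example.org --user1 foo@example.org --password1 'secret1' \
             --host2 imap.example.net --user2 foo@example.net --password2 'secret2'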
Conventions used: @@ -108,20 +108,20 @@ Conventions used: reg means regular expression cmd means command - --dry : Makes imapsync doing nothing for real, just print what - would be done without --dry. + --dry : Makes imapsync doing nothing for real, just print what + would be done without --dry. =head2 OPTIONS/credentials --host1 str : Source or "from" imap server. Mandatory. - --port1 int : Port to connect on host1. + --port1 int : Port to connect on host1. Optional since default port is 143 or 993 if --ssl1 --user1 str : User to login on host1. Mandatory. --password1 str : Password for the user1. --host2 str : "destination" imap server. Mandatory. - --port2 int : Port to connect on host2. + --port2 int : Port to connect on host2. Optional since default port is 143 or 993 if --ssl2 --user2 str : User to login on host2. Mandatory. --password2 str : Password for the user2. @@ -135,6 +135,9 @@ Conventions used: the password on the command line like --password1 does. --passfile2 str : Password file for the user2. Contains the password. +You can also pass the passwords in the environment variables +IMAPSYNC_PASSWORD1 and IMAPSYNC_PASSWORD2 + =head2 OPTIONS/encryption --nossl1 : Do not use a SSL connection on host1. @@ -219,10 +222,18 @@ Conventions used: --f1f2 str1=str2 : Force folder str1 to be synced to str2, --f1f2 overrides --automap and --regextrans2. - --subfolder2 str : Move whole host1 folders hierarchy under this - host2 folder str . - It does it by adding two --regextrans2 options before - all others. Add --debug to see what's really going on. + --subfolder2 str : Syncs the whole host1 folders hierarchy under the + host2 folder named str. + It does it internally by adding three + --regextrans2 options before all others. + Add --debug to see what's really going on. + + --subfolder1 str : Syncs the host1 folders hierarchy under str + to the root hierarchy of host2. + It's the couterpart of a sync done by --subfolder2 + in the reverse order. Use --subfolder2 str + for a backup under str and --subfolder1 str + for the restore from str. --subscribed : Transfers subscribed folders. --subscribe : Subscribe to the folders transferred on the @@ -252,7 +263,7 @@ Conventions used: --nofoldersizes : Do not calculate the size of each folder at the beginning of the sync. Default is to calculate them. - --nofoldersizesatend: Do not calculate the size of each folder at the + --nofoldersizesatend: Do not calculate the size of each folder at the end of the sync. Default is to calculate them. --justfoldersizes : Exit after having printed the initial folder sizes. @@ -306,7 +317,7 @@ Conventions used: --resyncflags : Resync flags for already transferred messages. On by default. --noresyncflags : Do not resync flags for already transferred messages. - May be useful when a user has already started to play + May be useful when a user has already started to play with its host2 account. =head2 OPTIONS/deletions @@ -377,7 +388,7 @@ Conventions used: command. Applied on both sides. For a complete of what can be search see https://imapsync.lamiral.info/FAQ.d/FAQ.Messages_Selection.txt - + --search1 str : Same as --search but for selecting host1 messages only. --search2 str : Same as --search but for selecting host2 messages only. --search CRIT equals --search1 CRIT --search2 CRIT @@ -403,7 +414,10 @@ Conventions used: --syncacls : Synchronizes acls (Access Control Lists). --nosyncacls : Does not synchronize acls. This is the default. Acls in IMAP are not standardized, be careful. 
- + --addheader : When a message has no headers to be identified, + --addheader adds a "Message-Id" header, + like "Message-Id: 12345@imapsync", where 12345 + is the imap UID of the message on the host1 folder. =head2 OPTIONS/debugging @@ -426,35 +440,35 @@ Conventions used: Useful to check the ipv6 connectivity. Needs internet. -=head2 OPTIONS/specific +=head2 OPTIONS/specific --gmail1 : sets --host1 to Gmail and options from FAQ.Gmail.txt --gmail2 : sets --host2 to Gmail and options from FAQ.Gmail.txt - + --office1 : sets --host1 to Office365 options from FAQ.Exchange.txt --office2 : sets --host2 to Office365 options from FAQ.Exchange.txt --exchange1 : sets options from FAQ.Exchange.txt, account1 part --exchange2 : sets options from FAQ.Exchange.txt, account2 part - + --domino1 : sets options from FAQ.Domino.txt, account1 part --domino2 : sets options from FAQ.Domino.txt, account2 part - - - -=head2 OPTIONS/behavior + + + +=head2 OPTIONS/behavior --maxmessagespersecond int : limits the number of messages transferred per second. - + --maxbytespersecond int : limits the average transfer rate per second. - --maxbytesafter int : starts --maxbytespersecond limitation only after + --maxbytesafter int : starts --maxbytespersecond limitation only after --maxbytesafter amount of data transferred. - + --maxsleep int : do not sleep more than int seconds. On by default, 2 seconds max, like --maxsleep 2 - --abort : terminates a previous call still running. + --abort : terminates a previous call still running. It uses the pidfile to know what process to abort. --exitwhenover int : Stop syncing when total bytes transferred reached. @@ -498,12 +512,12 @@ dangerous because of the 'ps auxwwwwe' command. So, saving the password in a well protected file (600 or rw-------) is the best solution. -Imapsync activates ssl or tls encryption by default, if possible. -What detailed behavior is under this "if possible"? -Imapsync activates ssl if the well known port imaps port (993) is open -on the imap servers. If the imaps port is closed then it open a -normal (clear) connection on port 143 but it looks for TLS support -in the CAPABILITY list of the servers. If TLS is supported +Imapsync activates ssl or tls encryption by default, if possible. +What detailed behavior is under this "if possible"? +Imapsync activates ssl if the well known port imaps port (993) is open +on the imap servers. If the imaps port is closed then it open a +normal (clear) connection on port 143 but it looks for TLS support +in the CAPABILITY list of the servers. If TLS is supported then imapsync goes to encryption. If the automatic ssl/tls detection fails then imapsync will @@ -519,7 +533,28 @@ or at https://imapsync.lamiral.info/FAQ.d/FAQ.Security.txt Imapsync will exit with a 0 status (return code) if everything went good. Otherwise, it exits with a non-zero status. 
+Here is the list of the exit code values (an integer between 0 and 255), +the names reflects their meaning: +=for comment +egrep '^Readonly my.*\$EX' imapsync | egrep -o 'EX.*' | sed 's_^_ _' + + + EX_OK => 0 ; #/* successful termination */ + EX_USAGE => 64 ; #/* command line usage error */ + EX_NOINPUT => 66 ; #/* cannot open input */ + EX_UNAVAILABLE => 69 ; #/* service unavailable */ + EX_SOFTWARE => 70 ; #/* internal software error */ + EXIT_CATCH_ALL => 1 ; # Any other error + EXIT_BY_SIGNAL => 6 ; # Should be 128+n where n is the sig_num + EXIT_PID_FILE_ERROR => 8 ; + EXIT_CONNECTION_FAILURE => 10 ; + EXIT_TLS_FAILURE => 12 ; + EXIT_AUTHENTICATION_FAILURE => 16 ; + EXIT_SUBFOLDER1_NO_EXISTS => 21 ; + EXIT_WITH_ERRORS => 111 ; + EXIT_WITH_ERRORS_MAX => 112 ; + EXIT_TESTS_FAILED => 254 ; # Like Test::More API =head1 LICENSE AND COPYRIGHT @@ -546,7 +581,7 @@ Feedback good or bad is very often welcome. Gilles LAMIRAL earns his living by writing, installing, configuring and teaching free, open and often gratis software. Imapsync used to be "always gratis" but now it is -only "often gratis" because imapsync is sold by its author, +only "often gratis" because imapsync is sold by its author, a good way to maintain and support free open public software over decades. @@ -609,13 +644,13 @@ https://imapsync.lamiral.info/examples/ =head1 INSTALL Imapsync works under any Unix with perl. - + Imapsync works under most Windows (2000, XP, Vista, Seven, Eight, Ten and all Server releases 2000, 2003, 2008 and R2, 2012 and R2) as a standalone binary software called imapsync.exe, - usually launched from a batch file in order to avoid always typing + usually launched from a batch file in order to avoid always typing the options. - + Imapsync works under OS X as a standalone binary software called imapsync_bin_Darwin @@ -652,38 +687,54 @@ Feel free to hack imapsync as the NOLIMIT license permits it. See also https://imapsync.lamiral.info/S/external.shtml for a better up to date list. - imap_tools : https://github.com/andrewnimmo/rick-sanders-imap-tools - offlineimap : https://github.com/nicolas33/offlineimap - Doveadm-Sync : http://wiki2.dovecot.org/Tools/Doveadm/Sync - ( Dovecot sync tool ) - mbsync : http://isync.sourceforge.net/ - mailsync : http://mailsync.sourceforge.net/ - mailutil : http://www.washington.edu/imap/ - part of the UW IMAP tookit. - imaprepl : http://www.bl0rg.net/software/ - http://freecode.com/projects/imap-repl/ - imapcopy : http://www.ardiehl.de/imapcopy/ - migrationtool : http://sourceforge.net/projects/migrationtool/ - imapmigrate : http://sourceforge.net/projects/cyrus-utils/ - wonko_imapsync: http://wonko.com/article/554 - see also file W/tools/wonko_ruby_imapsync - exchange-away : http://exchange-away.sourceforge.net/ - pop2imap : http://www.linux-france.org/prj/pop2imap/ +Last updated and verified on Thu Apr 11, 2019. - -Feedback (good or bad) will often be welcome. 
+ imapsync : https://github.com/imapsync/imapsync
+ (this is an imapsync copy, sometimes delayed,
+ with --noreleasecheck by default since release 1.592, 2014/05/22)
+ imap_tools : https://web.archive.org/web/20161228145952/http://www.athensfbc.com/imap_tools/
+ The imap_tools code is now at
+ https://github.com/andrewnimmo/rick-sanders-imap-tools
+ imaputils : https://github.com/mtsatsenko/imaputils (very old imap_tools fork)
+ Doveadm-Sync : https://wiki2.dovecot.org/Tools/Doveadm/Sync ( Dovecot sync tool )
+ davmail : http://davmail.sourceforge.net/
+ offlineimap : http://offlineimap.org/
+ mbsync : http://isync.sourceforge.net/
+ mailsync : http://mailsync.sourceforge.net/
+ mailutil : http://www.washington.edu/imap/ part of the UW IMAP toolkit.
+ imaprepl : https://bl0rg.net/software/ http://freecode.com/projects/imap-repl/
+ imapcopy (Pascal): http://www.ardiehl.de/imapcopy/
+ imapcopy (Java) : https://code.google.com/archive/p/imapcopy/
+ imapsize : http://www.broobles.com/imapsize/
+ migrationtool : http://sourceforge.net/projects/migrationtool/
+ imapmigrate : http://sourceforge.net/projects/cyrus-utils/
+ larch : https://github.com/rgrove/larch (derived from wonko_imapsync, good at Gmail)
+ wonko_imapsync : http://wonko.com/article/554 (superseded by larch)
+ pop2imap : http://www.linux-france.org/prj/pop2imap/ (I wrote that too)
+ exchange-away : http://exchange-away.sourceforge.net/
+ SyncBackPro : http://www.2brightsparks.com/syncback/sbpro.html
+ ImapSyncClient : https://github.com/ridaamirini/ImapSyncClient
+ MailStore : https://www.mailstore.com/en/products/mailstore-home/
+ mnIMAPSync : https://github.com/manusa/mnIMAPSync
+ imap-upload : http://imap-upload.sourceforge.net/
+ (a tool for uploading a local mbox file to an IMAP4 server)
=head1 HISTORY
I wrote imapsync because an enterprise (basystemes) paid me to install
a new imap server without losing huge old mailboxes located in a far
-away remote imap server, accessible by a low-bandwidth often broken link.
-The tool imapcp (written in python) could not help me because I had to verify
-every mailbox was well transferred, and then delete it after a good
-transfer. Imapsync started its life as a patch of the copy_folder.pl
+away remote imap server, accessible by an often broken low-bandwidth ISDN link.
+
+I had to verify every mailbox was well transferred, all folders, all messages,
+without wasting bandwidth or creating duplicates upon resyncs. The design was
+made with the rsync command in mind.
+
+Imapsync started its life as a patch of the copy_folder.pl
script. The script copy_folder.pl comes from the Mail-IMAPClient-2.1.3 perl
-module tarball source (more precisely in the examples/ directory of the
-Mail-IMAPClient tarball). So many happened since then that I wonder
+module tarball source (more precisely in the examples/ directory of the
+Mail-IMAPClient tarball).
+
+So much has happened since then that I wonder
if any lines of the original copy_folder.pl remain in the imapsync source
code.
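Tying together two of the newly documented features above, here is a hedged sketch of a backup-and-restore round trip using the IMAPSYNC_PASSWORD1/IMAPSYNC_PASSWORD2 environment variables and the --subfolder2/--subfolder1 pair (hosts, users and the folder name are placeholders):

    # Passwords come from the environment instead of --password1/--password2
    export IMAPSYNC_PASSWORD1='secret1' IMAPSYNC_PASSWORD2='secret2'
    # Backup: copy the whole host1 hierarchy under the host2 folder "Backup2019"
    imapsync --host1 imap.example.org --user1 foo --host2 backup.example.net --user2 bar --subfolder2 Backup2019
    # Restore: copy the "Backup2019" hierarchy back to the root of the original account
    imapsync --host1 backup.example.net --user1 bar --host2 imap.example.org --user2 foo --subfolder1 Backup2019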
@@ -728,6 +779,8 @@ use Cwd ; use Readonly ; use Sys::MemInfo ; use Regexp::Common ; +use Text::ParseWords; +use File::Tail ; local $OUTPUT_AUTOFLUSH = 1 ; @@ -735,6 +788,10 @@ local $OUTPUT_AUTOFLUSH = 1 ; # Let us do like sysexits.h # /usr/include/sysexits.h +# and https://www.tldp.org/LDP/abs/html/exitcodes.html + +# Should avoid 2 126 127 128..128+64=192 255 +# Should use 0 1 3..125 193..254 Readonly my $EX_OK => 0 ; #/* successful termination */ Readonly my $EX_USAGE => 64 ; #/* command line usage error */ @@ -754,21 +811,35 @@ Readonly my $EX_SOFTWARE => 70 ; #/* internal software error */ #Readonly my $EX_CONFIG => 78 ; #/* configuration error */ # Mine -Readonly my $EXIT_BY_SIGNAL => 6 ; +Readonly my $EXIT_CATCH_ALL => 1 ; # Any other error + +Readonly my $EXIT_BY_SIGNAL => 6 ; # Should be 128+n where n is the sig_num Readonly my $EXIT_PID_FILE_ERROR => 8 ; +Readonly my $EXIT_CONNECTION_FAILURE => 10 ; +Readonly my $EXIT_TLS_FAILURE => 12 ; +Readonly my $EXIT_AUTHENTICATION_FAILURE => 16 ; +Readonly my $EXIT_SUBFOLDER1_NO_EXISTS => 21 ; + Readonly my $EXIT_WITH_ERRORS => 111 ; Readonly my $EXIT_WITH_ERRORS_MAX => 112 ; -Readonly my $EXIT_UNKNOWN => 126 ; + Readonly my $EXIT_TESTS_FAILED => 254 ; # Like Test::More API + + + + + + Readonly my $DEFAULT_LOGDIR => 'LOG_imapsync' ; Readonly my $ERRORS_MAX => 50 ; # exit after 50 errors. Readonly my $ERRORS_MAX_CGI => 20 ; # exit after 20 errors in CGI context. + Readonly my $INTERVAL_TO_EXIT => 2 ; # interval max to exit instead of reconnect Readonly my $SPLIT => 100 ; # By default, 100 at a time, not more. @@ -804,6 +875,8 @@ Readonly my $NUMBER_42 => 42 ; Readonly my $NUMBER_100 => 100 ; Readonly my $NUMBER_200 => 200 ; Readonly my $NUMBER_300 => 300 ; +Readonly my $NUMBER_123456 => 123456 ; +Readonly my $NUMBER_654321 => 654321 ; Readonly my $NUMBER_20_000 => 20_000 ; @@ -828,6 +901,14 @@ Readonly my $UMASK_PARANO => '0077' ; Readonly my $STR_use_releasecheck => q{Check if a new imapsync release is available by adding --releasecheck} ; +Readonly my $GMAIL_MAXSIZE => 35_651_584 ; + + +# if ( 'MSWin32' eq $OSNAME ) +# if ( 'darwin' eq $OSNAME ) +# if ( 'linux' eq $OSNAME ) + + # global variables # Currently working to finish with only $sync @@ -835,49 +916,39 @@ Readonly my $STR_use_releasecheck => q{Check if a new imapsync release is availa my( $sync, - $debug, $debugimap, $debugimap1, $debugimap2, $debugcontent, $debugflags, + $debugimap, $debugimap1, $debugimap2, $debugcontent, $debugflags, $debuglist, $debugdev, $debugmaxlinelength, $debugcgi, $domain1, $domain2, - $passfile1, $passfile2, @include, @exclude, @folderrec, @folderfirst, @folderlast, $prefix1, $prefix2, - $subfolder2, - @regextrans2, @regexmess, @regexflag, @skipmess, @pipemess, $pipemesscheck, + @regexmess, @regexflag, @skipmess, @pipemess, $pipemesscheck, $flagscase, $filterflags, $syncflagsaftercopy, - $sep1, $sep2, $syncinternaldates, $idatefromheader, $syncacls, $fastio1, $fastio2, - $maxsize, $minsize, $maxage, $minage, - $exitwhenover, + $minsize, $maxage, $minage, $search, $search1, $search2, $skipheader, @useheader, $skipsize, $allowsizemismatch, $foldersizes, $foldersizesatend, $buffersize, - $delete1, $delete2, $delete2duplicates, - $expunge1, $expunge2, $uidexpunge2, - $justfoldersizes, + + $authmd5, $authmd51, $authmd52, $subscribed, $subscribe, $subscribeall, $help, - $justfolders, $justbanner, + $justbanner, $fast, - $total_bytes_skipped, - $total_bytes_error, - $nb_msg_skipped, + $nb_msg_skipped_dry_mode, $h1_nb_msg_duplicate, $h2_nb_msg_duplicate, - 
$h1_nb_msg_noheader, $h2_nb_msg_noheader, - $h1_total_bytes_duplicate, - $h2_total_bytes_duplicate, - $h1_nb_msg_deleted, + $h2_nb_msg_deleted, $h1_bytes_processed, - $h1_nb_msg_processed, + $h1_nb_msg_start, $h1_bytes_start, $h2_nb_msg_start, $h2_bytes_start, $h1_nb_msg_end, $h1_bytes_end, @@ -897,10 +968,7 @@ my( $delete2folders, $delete2foldersonly, $delete2foldersbutnot, $usecache, $debugcache, $cacheaftercopy, $wholeheaderifneeded, %h1_msgs_copy_by_uid, $useuid, $h2_uidguess, - %h1, %h2, $checkmessageexists, - $expungeaftereach, - $fixslash2, $messageidnodomain, $fixInboxINBOX, $maxlinelength, $maxlinelengthcmd, @@ -912,18 +980,18 @@ my( $disarmreadreceipts, $mixfolders, $skipemptyfolders, $fetch_hash_set, -); +) ; # main program # global variables initialization -# Currently removing all global variables except $sync +# I'm currently removing all global variables except $sync # passing each of them under $sync->{variable_name} $sync->{timestart} = time ; # Is a float because of use Time::HiRres -$sync->{rcs} = q{$Id: imapsync,v 1.882 2018/05/05 21:10:43 gilles Exp gilles $} ; +$sync->{rcs} = q{$Id: imapsync,v 1.937 2019/05/01 22:14:00 gilles Exp gilles $} ; $sync->{ memory_consumption_at_start } = memory_consumption( ) || 0 ; @@ -932,26 +1000,27 @@ my @loadavg = loadavg( ) ; $sync->{cpu_number} = cpu_number( ) ; $sync->{loaddelay} = load_and_delay( $sync->{cpu_number}, @loadavg ) ; -$sync->{loadavg} = join( q{ }, $loadavg[ 0 ] ) +$sync->{loadavg} = join( q{ }, $loadavg[ 0 ] ) . " on $sync->{cpu_number} cores and " . ram_memory_info( ) ; -$sync->{total_bytes_transferred} = 0 ; -$total_bytes_skipped = 0; -$total_bytes_error = 0; -$sync->{nb_msg_transferred} = 0; -$nb_msg_skipped = $nb_msg_skipped_dry_mode = 0; -$h1_nb_msg_deleted = $h2_nb_msg_deleted = 0; +$sync->{ total_bytes_transferred } = 0 ; +$sync->{ total_bytes_skipped } = 0; +$sync->{ nb_msg_transferred } = 0; +$sync->{ nb_msg_skipped } = $nb_msg_skipped_dry_mode = 0; +$sync->{ h1_nb_msg_deleted } = $h2_nb_msg_deleted = 0; $h1_nb_msg_duplicate = $h2_nb_msg_duplicate = 0; -$h1_nb_msg_noheader = $h2_nb_msg_noheader = 0; -$h1_total_bytes_duplicate = $h2_total_bytes_duplicate = 0; +$sync->{ h1_nb_msg_noheader } = 0 ; +$h2_nb_msg_noheader = 0 ; $h1_nb_msg_start = $h1_bytes_start = 0 ; $h2_nb_msg_start = $h2_bytes_start = 0 ; -$h1_nb_msg_processed = $h1_bytes_processed = 0 ; +$sync->{ h1_nb_msg_processed } = $h1_bytes_processed = 0 ; + +$sync->{ h2_nb_msg_crossdup } = 0 ; #$h1_nb_msg_end = $h1_bytes_end = 0 ; #$h2_nb_msg_end = $h2_bytes_end = 0 ; @@ -977,7 +1046,7 @@ my %month_abrev = ( my $cgidir ; -# Just create a CGI object if under cgi context only. +# Just create a CGI object if under cgi context only. # Needed for the get_options() call cgibegin( $sync ) ; @@ -990,8 +1059,7 @@ cgibuildheader( $sync ) ; myprint( output( $sync ) ) ; output_reset_with( $sync ) ; -# Can break here if load is too heavy -cgiload( $sync ) ; +# Old place for cgiload( $sync ) ; # don't go on if options are not all known. if ( ! defined $options_good ) { exit $EX_USAGE ; } @@ -1001,8 +1069,8 @@ if ( ! defined $options_good ) { exit $EX_USAGE ; } # the second line (ending with "1 ;") can then stay active or be commented, # the result will be the same: no releasecheck by default (because 0 is then the defined value). -$sync->{releasecheck} = defined $sync->{releasecheck} ? $sync->{releasecheck} : 0 ; -#$sync->{releasecheck} = defined $sync->{releasecheck} ? $sync->{releasecheck} : 1 ; +#$sync->{releasecheck} = defined $sync->{releasecheck} ? 
$sync->{releasecheck} : 0 ; +$sync->{releasecheck} = defined $sync->{releasecheck} ? $sync->{releasecheck} : 1 ; # just the version if ( $sync->{ version } ) { @@ -1020,8 +1088,12 @@ after_get_options( $sync, $options_good ) ; # Under CGI environment, fix caveat emptor potential issues cgisetcontext( $sync ) ; +# --gmail --gmail --exchange --office etc. easyany( $sync ) ; +$sync->{ sanitize } = defined $sync->{ sanitize } ? $sync->{ sanitize } : 1 ; +sanitize( $sync ) ; + $sync->{ tmpdir } ||= File::Spec->tmpdir( ) ; # Unit tests @@ -1032,41 +1104,54 @@ testslive( $sync ) if ( $sync->{testslive} ) ; testslive6( $sync ) if ( $sync->{testslive6} ) ; # + $sync->{pidfile} = defined $sync->{pidfile} ? $sync->{pidfile} : $sync->{ tmpdir } . '/imapsync.pid' ; $sync->{pidfilelocking} = defined $sync->{pidfilelocking} ? $sync->{pidfilelocking} : 0 ; # old abort place -@{ $sync->{ sigexit } } = ( defined( $sync->{ sigexit } ) ) ? @{ $sync->{ sigexit } } : ( 'QUIT', 'TERM' ) ; +# Unix signals +@{ $sync->{ sigexit } } = ( defined( $sync->{ sigexit } ) ) ? @{ $sync->{ sigexit } } : ( 'QUIT', 'TERM' ) ; @{ $sync->{ sigreconnect } } = ( defined( $sync->{ sigreconnect } ) ) ? @{ $sync->{ sigreconnect } } : ( 'INT' ) ; +@{ $sync->{ sigprint } } = ( defined( $sync->{ sigprint } ) ) ? @{ $sync->{ sigprint } } : ( 'HUP' ) ; +@{ $sync->{ sigignore } } = ( defined( $sync->{ sigignore } ) ) ? @{ $sync->{ sigignore } } : ( ) ; -sig_install( $sync, \&catch_exit, @{ $sync->{ sigexit } } ) ; +local %SIG = %SIG ; +sig_install( $sync, \&catch_exit, @{ $sync->{ sigexit } } ) ; sig_install( $sync, \&catch_reconnect, @{ $sync->{ sigreconnect } } ) ; -# --sigignore can override sigexit and sigreconnect (for the same signals only) +sig_install( $sync, \&catch_print, @{ $sync->{ sigprint } } ) ; +# --sigignore can override sigexit, sigreconnect and sigprint (for the same signals only) sig_install( $sync, \&catch_ignore, @{ $sync->{ sigignore } } ) ; -sig_install( $sync, \&toggle_sleep, 'USR1' ) ; +sig_install_toggle_sleep( $sync ) ; + $sync->{log} = defined $sync->{log} ? $sync->{log} : 1 ; $sync->{errorsdump} = defined $sync->{errorsdump} ? $sync->{errorsdump} : 1 ; $sync->{errorsmax} = defined $sync->{errorsmax} ? $sync->{errorsmax} : $ERRORS_MAX ; - +# log and output if ( $sync->{log} ) { setlogfile( $sync ) ; teelaunch( $sync ) ; # now $sync->{tee} is a filehandle to STDOUT and the logfile } # STDERR goes to the same place: LOG and STDOUT (if logging is on) -$sync->{tee} and local *STDERR = *${$sync->{tee}}{IO} ; - +$sync->{tee} and local *STDERR = *${$sync->{tee}}{IO} ; + + + $timestart_int = int( $sync->{timestart} ) ; $timebefore = $sync->{timestart} ; + my $timestart_str = localtime( $sync->{timestart} ) ; + +# The prints in the log starts here + myprint( localhost_info( $sync ), "\n" ) ; myprint( "Transfer started at $timestart_str\n" ) ; -myprint( "PID is $PROCESS_ID\n" ) ; +myprint( "PID is $PROCESS_ID my PPID is ", mygetppid( ), "\n" ) ; myprint( "Log file is $sync->{logfile} ( to change it, use --logfile path ; or use --nolog to turn off logging )\n" ) if ( $sync->{log} ) ; myprint( "Load is " . ( join( q{ }, loadavg( ) ) || 'unknown' ), " on $sync->{cpu_number} cores\n" ) ; #myprintf( "Memory consumption so far: %.1f MiB\n", memory_consumption( ) / $KIBI / $KIBI ) ; @@ -1076,7 +1161,6 @@ myprint( 'Effective user id is ' . getpwuid_any_os( $EFFECTIVE_USER_ID ). " (eui $modulesversion = defined $modulesversion ? $modulesversion : 1 ; - my $warn_release = ( $sync->{releasecheck} ) ? 
check_last_release( ) : $STR_use_releasecheck ; @@ -1089,7 +1173,7 @@ $cacheaftercopy = 1 if ( $usecache and ( ! defined $cacheaftercopy ) ) ; $sync->{ checkselectable } = defined $sync->{ checkselectable } ? $sync->{ checkselectable } : 1 ; $sync->{ checkfoldersexist } = defined $sync->{ checkfoldersexist } ? $sync->{ checkfoldersexist } : 1 ; $checkmessageexists = defined $checkmessageexists ? $checkmessageexists : 0 ; -$expungeaftereach = defined $expungeaftereach ? $expungeaftereach : 1 ; +$sync->{ expungeaftereach } = defined $sync->{ expungeaftereach } ? $sync->{ expungeaftereach } : 1 ; # abletosearch is on by default $sync->{abletosearch} = defined $sync->{abletosearch} ? $sync->{abletosearch} : 1 ; @@ -1099,41 +1183,65 @@ $checkmessageexists = 0 if ( not $sync->{abletosearch1} ) ; $sync->{showpasswords} = defined $sync->{showpasswords} ? $sync->{showpasswords} : 0 ; -$fixslash2 = defined $fixslash2 ? $fixslash2 : 1 ; +$sync->{ fixslash2 } = defined $sync->{ fixslash2 } ? $sync->{ fixslash2 } : 1 ; $fixInboxINBOX = defined $fixInboxINBOX ? $fixInboxINBOX : 1 ; $create_folder_old = defined $create_folder_old ? $create_folder_old : 0 ; $mixfolders = defined $mixfolders ? $mixfolders : 1 ; $sync->{automap} = defined $sync->{automap} ? $sync->{automap} : 0 ; -$delete2duplicates = 1 if ( $delete2 and ( ! defined $delete2duplicates ) ) ; +$sync->{ delete2duplicates } = 1 if ( $sync->{ delete2 } and ( ! defined $sync->{ delete2duplicates } ) ) ; $sync->{maxmessagespersecond} = defined $sync->{maxmessagespersecond} ? $sync->{maxmessagespersecond} : 0 ; $sync->{maxbytespersecond} = defined $sync->{maxbytespersecond} ? $sync->{maxbytespersecond} : 0 ; $sync->{sslcheck} = defined $sync->{sslcheck} ? $sync->{sslcheck} : 1 ; -myprint( banner_imapsync( @ARGV ) ) ; +myprint( banner_imapsync( $sync, @ARGV ) ) ; myprint( "Temp directory is $sync->{ tmpdir } ( to change it use --tmpdir dirpath )\n" ) ; + myprint( output( $sync ) ) ; +output_reset_with( $sync ) ; do_valid_directory( $sync->{ tmpdir } ) || croak "Error creating tmpdir $sync->{ tmpdir } : $OS_ERROR" ; remove_pidfile_not_running( $sync->{pidfile} ) ; +# if another imapsync is running then tail -f its logfile and exit +# useful in cgi context +if ( $sync->{tail} and tail( $sync ) ) +{ + myprint( "Tail -f finished. Now finishing myself\n" ) ; + exit_clean( $sync, $EX_OK ) ; + exit $EX_OK ; +} if ( ! write_pidfile( $sync ) ) { + myprint( "Exiting with return value $EXIT_PID_FILE_ERROR\n" ) ; exit $EXIT_PID_FILE_ERROR ; } -# simulong is just a loop printing some lines for xx seconds with option "--simulong xx". -if ( $sync->{simulong} ) { simulong( $sync->{simulong} ) ; } -# New place to abort -if ( $sync->{abort} ) { - abort( $sync ) ; +# New place for abort +# abort before simulong in order to be able to abort a simulong sync +if ( $sync->{ abort } ) +{ + abort( $sync ) ; } +# simulong is just a loop printing some lines for xx seconds with option "--simulong xx". +if ( $sync->{ simulong } ) +{ + simulong( $sync->{ simulong } ) ; +} + + +# New place for cgiload 2019_03_03 +# because I want to log it +# Can break here if load is too heavy +cgiload( $sync ) ; + + $fixcolonbug = defined $fixcolonbug ? 
$fixcolonbug : 1 ; if ( $usecache and $fixcolonbug ) { tmpdir_fix_colon_bug( $sync ) } ; @@ -1141,7 +1249,7 @@ if ( $usecache and $fixcolonbug ) { tmpdir_fix_colon_bug( $sync ) } ; $modulesversion and myprint( "Modules version list:\n", modulesversion(), "( use --no-modulesversion to turn off printing this Perl modules list )\n" ) ; -check_lib_version( ) or +check_lib_version( $sync ) or croak "imapsync needs perl lib Mail::IMAPClient release 3.30 or superior.\n"; @@ -1167,21 +1275,20 @@ if ( $sync->{resyncflags} ) { } - sslcheck( $sync ) ; - +#print Data::Dumper->Dump( [ \$sync ] ) ; $split1 ||= $SPLIT ; $split2 ||= $SPLIT ; -$sync->{host1} || missing_option( '--host1' ) ; +$sync->{host1} || missing_option( $sync, '--host1' ) ; $sync->{port1} ||= ( $sync->{ssl1} ) ? $IMAP_SSL_PORT : $IMAP_PORT ; -$sync->{host2} || missing_option( '--host2' ) ; +$sync->{host2} || missing_option( $sync, '--host2' ) ; $sync->{port2} ||= ( $sync->{ssl2} ) ? $IMAP_SSL_PORT : $IMAP_PORT ; $debugimap1 = $debugimap2 = 1 if ( $debugimap ) ; -$debug = 1 if ( $debugimap1 or $debugimap2 ) ; +$sync->{ debug } = 1 if ( $debugimap1 or $debugimap2 ) ; # By default, don't take size to compare $skipsize = (defined $skipsize) ? $skipsize : 1; @@ -1200,8 +1307,6 @@ if ( defined $delete2foldersbutnot or defined $delete2foldersonly ) { } - - my $SSL_VERIFY_POLICY ; my %SSL_VERIFY_STR ; @@ -1210,8 +1315,8 @@ Readonly %SSL_VERIFY_STR => ( IO::Socket::SSL::SSL_VERIFY_NONE( ) => 'SSL_VERIFY_NONE, ie, do not check the certificate server.' , IO::Socket::SSL::SSL_VERIFY_PEER( ) => 'SSL_VERIFY_PEER, ie, check the certificate server' , ) ; -$IO::Socket::SSL::DEBUG = $sync->{debugssl} || 1 ; +$IO::Socket::SSL::DEBUG = defined( $sync->{debugssl} ) ? $sync->{debugssl} : 1 ; if ( $sync->{ssl1} or $sync->{ssl2} or $sync->{tls1} or $sync->{tls2}) { @@ -1229,15 +1334,15 @@ if ( $sync->{ssl2} ) { } - if ( $sync->{justconnect} ) { - justconnect( ) ; + justconnect( $sync ) ; myprint( debugmemory( $sync, " after justconnect() call" ) ) ; exit_clean( $sync, $EX_OK ) ; } -$sync->{user1} || missing_option( '--user1' ) ; -$sync->{user2} || missing_option( '--user2' ) ; + +$sync->{user1} || missing_option( $sync, '--user1' ) ; +$sync->{user2} || missing_option( $sync, '--user2' ) ; $syncinternaldates = defined $syncinternaldates ? $syncinternaldates : 1; @@ -1246,30 +1351,30 @@ $syncinternaldates = defined $syncinternaldates ? $syncinternaldates : 1; # Done because --delete1 --noexpunge1 is very dangerous on the second run: # the Deleted flag is then synced to all previously transferred messages. # So --delete1 implies --expunge1 is a better usability default behavior. -if ( $delete1 ) { - if ( ! defined $expunge1 ) { +if ( $sync->{ delete1 } ) { + if ( ! 
defined $sync->{ expunge1 } ) { myprint( "Info: turning on --expunge1 because --delete1 --noexpunge1 is very dangerous on the second run.\n" ) ; - $expunge1 = 1 ; + $sync->{ expunge1 } = 1 ; } myprint( "Info: if expunging after each message slows down too much the sync then use --noexpungeaftereach to speed up\n" ) ; } -if ( $uidexpunge2 and not Mail::IMAPClient->can( 'uidexpunge' ) ) { - myprint( "Failure: uidexpunge not supported (IMAPClient release < 3.17), use --expunge2 instead\n" ) ; +if ( $sync->{ uidexpunge2 } and not Mail::IMAPClient->can( 'uidexpunge' ) ) { + myprint( "Failure: uidexpunge not supported (IMAPClient release < 3.17), use nothing or --expunge2 instead\n" ) ; exit_clean( $sync, $EX_SOFTWARE ) ; } -if ( ( $delete2 or $delete2duplicates ) and not defined $uidexpunge2 ) { +if ( ( $sync->{ delete2 } or $sync->{ delete2duplicates } ) and not defined $sync->{ uidexpunge2 } ) { if ( Mail::IMAPClient->can( 'uidexpunge' ) ) { myprint( "Info: will act as --uidexpunge2\n" ) ; - $uidexpunge2 = 1 ; - }elsif ( not defined $expunge2 ) { + $sync->{ uidexpunge2 } = 1 ; + }elsif ( not defined $sync->{ expunge2 } ) { myprint( "Info: will act as --expunge2 (no uidexpunge support)\n" ) ; - $expunge2 = 1 ; + $sync->{ expunge2 } = 1 ; } } -if ( $delete1 and $delete2 ) { +if ( $sync->{ delete1 } and $sync->{ delete2 } ) { myprint( "Warning: using --delete1 and --delete2 together is almost always a bad idea, exiting imapsync\n" ) ; exit_clean( $sync, $EX_USAGE ) ; } @@ -1310,11 +1415,11 @@ $authmech1 = uc $authmech1; $authmech2 = uc $authmech2; if (defined $proxyauth1 && !$authuser1) { - missing_option( 'With --proxyauth1, --authuser1' ) ; + missing_option( $sync, 'With --proxyauth1, --authuser1' ) ; } if (defined $proxyauth2 && !$authuser2) { - missing_option( 'With --proxyauth2, --authuser2' ) ; + missing_option( $sync, 'With --proxyauth2, --authuser2' ) ; } $authuser1 ||= $sync->{user1}; @@ -1333,7 +1438,7 @@ myprint( "Host2: imap connection timeout is $sync->{h2}->{timeout} seconds\n" ) $syncacls = defined $syncacls ? $syncacls : 0 ; # No folders sizes if --justfolders, unless really wanted. -if ( $justfolders and not defined $foldersizes ) { $foldersizes = 0 ; } +if ( $sync->{ justfolders } and not defined $foldersizes ) { $foldersizes = 0 ; } $foldersizes = ( defined $foldersizes ) ? $foldersizes : 1 ; $foldersizesatend = ( defined $foldersizesatend ) ? $foldersizesatend : $foldersizes ; @@ -1365,18 +1470,14 @@ get_password1( $sync ) ; get_password2( $sync ) ; - $sync->{dry_message} = q{} ; if( $sync->{dry} ) { $sync->{dry_message} = "\t(not really since --dry mode)" ; } - $search1 ||= $search if ( $search ) ; $search2 ||= $search if ( $search ) ; - - if ( $disarmreadreceipts ) { push @regexmess, q{s{\A((?:[^\n]+\r\n)+|)(^Disposition-Notification-To:[^\n]*\n)(\r?\n|.*\n\r?\n)}{$1X-$2$3}ims} ; } @@ -1390,17 +1491,17 @@ if ( @pipemess and $pipemesscheck ) { my $string = pipemess( q{ }, @pipemess ) ; # string undef means something was bad. if ( not ( defined $string ) ) { - die_clean( "Error: one of --pipemess command is bad, check it\n" ) ; + exit_clean( $sync, $EX_USAGE, "Error: one of --pipemess command is bad, check it\n" ) ; } myprint( "Ok with each --pipemess @pipemess\n" ) ; } if ( $maxlinelengthcmd ) { - myprint( "Checking --maxlinelengthcmd command, $maxlinelengthcmd, with an space string.\n" ) ; + myprint( "Checking --maxlinelengthcmd command, $maxlinelengthcmd, with an space string.\n" ) ; my $string = pipemess( q{ }, $maxlinelengthcmd ) ; # string undef means something was bad. 
if ( not ( defined $string ) ) { - die_clean( "Error: --maxlinelengthcmd command is bad, check it\n" ) ; + exit_clean( $sync, $EX_USAGE, "Error: --maxlinelengthcmd command is bad, check it\n" ) ; } myprint( "Ok with --maxlinelengthcmd $maxlinelengthcmd\n" ) ; } @@ -1410,7 +1511,7 @@ if ( @regexmess ) { myprint( "Checking each --regexmess command with an space string.\n" ) ; # string undef means one of the eval regex was bad. if ( not ( defined $string ) ) { - die_clean( 'Error: one of --regexmess option is bad, check it' ) ; + exit_clean( $sync, $EX_USAGE, 'Error: one of --regexmess option is bad, check it' ) ; } myprint( "Ok with each --regexmess\n" ) ; } @@ -1420,7 +1521,7 @@ if ( @skipmess ) { my $match = skipmess( q{ } ) ; # match undef means one of the eval regex was bad. if ( not ( defined $match ) ) { - die_clean( 'Error: one of --skipmess option is bad, check it' ) ; + exit_clean( $sync, $EX_USAGE, 'Error: one of --skipmess option is bad, check it' ) ; } myprint( "Ok with each --skipmess\n" ) ; } @@ -1430,44 +1531,52 @@ if ( @regexflag ) { my $string = flags_regex( q{ } ) ; # string undef means one of the eval regex was bad. if ( not ( defined $string ) ) { - die_clean( 'Error: one of --regexflag option is bad, check it' ) ; + exit_clean( $sync, $EX_USAGE, 'Error: one of --regexflag option is bad, check it' ) ; } myprint( "Ok with each --regexflag\n" ) ; } -$sync->{imap1} = my $imap1 = login_imap( $sync->{host1}, $sync->{port1}, $sync->{user1}, $domain1, $sync->{password1}, +$sync->{imap1} = login_imap( $sync->{host1}, $sync->{port1}, $sync->{user1}, $domain1, $sync->{password1}, $debugimap1, $sync->{h1}->{timeout}, $fastio1, $sync->{ssl1}, $sync->{tls1}, $authmech1, $authuser1, $reconnectretry1, $proxyauth1, $uid1, $split1, 'Host1', $sync->{h1}, $sync ) ; -$sync->{imap2} = my $imap2 = login_imap( $sync->{host2}, $sync->{port2}, $sync->{user2}, $domain2, $sync->{password2}, +$sync->{imap2} = login_imap( $sync->{host2}, $sync->{port2}, $sync->{user2}, $domain2, $sync->{password2}, $debugimap2, $sync->{h2}->{timeout}, $fastio2, $sync->{ssl2}, $sync->{tls2}, $authmech2, $authuser2, $reconnectretry2, $proxyauth2, $uid2, $split2, 'Host2', $sync->{h2}, $sync ) ; -$debug and myprint( 'Host1 Buffer I/O: ', $imap1->Buffer(), "\n" ) ; -$debug and myprint( 'Host2 Buffer I/O: ', $imap2->Buffer(), "\n" ) ; +$sync->{ debug } and myprint( 'Host1 Buffer I/O: ', $sync->{imap1}->Buffer(), "\n" ) ; +$sync->{ debug } and myprint( 'Host2 Buffer I/O: ', $sync->{imap2}->Buffer(), "\n" ) ; -if ( ! $imap1->IsAuthenticated( ) ) { die_clean( 'Not authenticated on host1' ) ; } +if ( ! $sync->{imap1}->IsAuthenticated( ) ) { exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, 'Not authenticated on host1' ) ; } myprint( "Host1: state Authenticated\n" ) ; -if ( ! $imap2->IsAuthenticated( ) ) { die_clean( 'Not authenticated on host2' ) ; } +if ( ! $sync->{imap2}->IsAuthenticated( ) ) { exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, 'Not authenticated on host2' ) ; } myprint( "Host2: state Authenticated\n" ) ; -myprint( 'Host1 capability once authenticated: ', join(q{ }, @{ $imap1->capability() || [] }), "\n" ) ; -myprint( 'Host2 capability once authenticated: ', join(q{ }, @{ $imap2->capability() || [] }), "\n" ) ; +myprint( 'Host1 capability once authenticated: ', join(q{ }, @{ $sync->{imap1}->capability() || [] }), "\n" ) ; + +#myprint( Data::Dumper->Dump( [ $sync->{imap1} ] ) ) ; +#myprint( "imap4rev1: " . $sync->{imap1}->imap4rev1() . 
"\n" ) ; + +myprint( 'Host2 capability once authenticated: ', join(q{ }, @{ $sync->{imap2}->capability() || [] }), "\n" ) ; + # ID on by default since 1.832 $sync->{id} = defined $sync->{id} ? $sync->{id} : 1 ; imap_id_stuff( $sync ) ; -#quota( $imap1, 'h1', $sync ) ; # quota on host1 is useless and pollute host2 output. -quota( $imap2, 'h2', $sync ) ; +#quota( $sync, $sync->{imap1}, 'h1' ) ; # quota on host1 is useless and pollute host2 output. +quota( $sync, $sync->{imap2}, 'h2' ) ; -if ( $sync->{justlogin} ) { - $imap1->logout( ) ; - $imap2->logout( ) ; +maxsize_setting( $sync ) ; + +if ( $sync->{ justlogin } ) { + $sync->{imap1}->logout( ) ; + $sync->{imap2}->logout( ) ; + myprint( "Exiting because of --justlogin\n" ) ; exit_clean( $sync, $EX_OK ) ; } @@ -1490,14 +1599,14 @@ my $h1_folders_wanted_ct = 0 ; # counter of folders done. # All folders on host1 and host2 -@h1_folders_all = sort $imap1->folders( ) ; -@h2_folders_all = sort $imap2->folders( ) ; +@h1_folders_all = sort $sync->{imap1}->folders( ) ; +@h2_folders_all = sort $sync->{imap2}->folders( ) ; myprint( 'Host1: found ', scalar @h1_folders_all , " folders.\n" ) ; myprint( 'Host2: found ', scalar @h2_folders_all , " folders.\n" ) ; -foreach my $f ( @h1_folders_all ) { - $h1_folders_all{ $f } = 1 +foreach my $f ( @h1_folders_all ) { + $h1_folders_all{ $f } = 1 } foreach my $f ( @h2_folders_all ) { $h2_folders_all{ $f } = 1 ; @@ -1508,30 +1617,39 @@ $sync->{h1_folders_all} = \%h1_folders_all ; $sync->{h2_folders_all} = \%h2_folders_all ; $sync->{h2_folders_all_UPPER} = \%h2_folders_all_UPPER ; +private_folders_separators_and_prefixes( ) ; + + # Make a hash of subscribed folders in both servers. -for ( $imap1->subscribed( ) ) { $h1_subscribed_folder{ $_ } = 1 } ; -for ( $imap2->subscribed( ) ) { $h2_subscribed_folder{ $_ } = 1 } ; +for ( $sync->{imap1}->subscribed( ) ) { $h1_subscribed_folder{ $_ } = 1 } ; +for ( $sync->{imap2}->subscribed( ) ) { $h2_subscribed_folder{ $_ } = 1 } ; -if ( defined $subfolder2 ) { - unshift @regextrans2, - q(s,^$sync->{h2_prefix}(.*),$sync->{h2_prefix}${subfolder2}${h2_sep}$1,), - q(s,^INBOX$,$sync->{h2_prefix}${subfolder2}${h2_sep}INBOX,) ; +if ( defined $sync->{ subfolder1 } ) { + subfolder1( $sync ) ; +} + + + +if ( defined $sync->{ subfolder2 } ) { + subfolder2( $sync ) ; } if ( $fixInboxINBOX and ( my $reg = fix_Inbox_INBOX_mapping( \%h1_folders_all, \%h2_folders_all ) ) ) { - push @regextrans2, $reg ; + push @{ $sync->{ regextrans2 } }, $reg ; } -if ( ( $sync->{folder} and scalar @{ $sync->{folder} } ) - or $subscribed - or scalar @folderrec ) { + +if ( ( $sync->{ folder } and scalar @{ $sync->{ folder } } ) + or $subscribed + or scalar @folderrec ) +{ # folders given by option --folder - if ( $sync->{folder} and scalar @{ $sync->{folder} } ) { - add_to_requested_folders( @{ $sync->{folder} } ); + if ( $sync->{ folder } and scalar @{ $sync->{ folder } } ) { + add_to_requested_folders( @{ $sync->{ folder } } ) ; } # option --subscribed @@ -1540,16 +1658,18 @@ if ( ( $sync->{folder} and scalar @{ $sync->{folder} } ) } # option --folderrec - if (scalar @folderrec) { - foreach my $folderrec (@folderrec) { - add_to_requested_folders($imap1->folders($folderrec)); + if ( scalar @folderrec ) { + foreach my $folderrec ( @folderrec ) { + add_to_requested_folders( $sync->{imap1}->folders( $folderrec ) ) ; } } -} else { +} +else +{ # no include, no folder/subscribed/folderrec options => all folders if ( not scalar @include ) { myprint( "Including all folders found by default. 
Use --subscribed or --folder or --folderrec or --include to select specific folders. Use --exclude to unselect specific folders.\n" ) ; - add_to_requested_folders(@h1_folders_all); + add_to_requested_folders( @h1_folders_all ) ; } } @@ -1584,7 +1704,7 @@ if ( $sync->{ checkfoldersexist } ) { my @h1_folders_wanted_exist ; myprint( "Host1: Checking wanted folders exist. Use --nocheckfoldersexist to avoid this check (shared of public namespace targeted).\n" ) ; foreach my $folder ( @h1_folders_wanted ) { - ( $debug or $sync->{debugfolders} ) and myprint( "Checking $folder exists on host1\n" ) ; + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "Checking $folder exists on host1\n" ) ; if ( ! exists $h1_folders_all{ $folder } ) { myprint( "Host1: warning! ignoring folder $folder because it is not in host1 whole folders list.\n" ) ; next ; @@ -1602,16 +1722,16 @@ if ( $sync->{ checkselectable } ) { my @h1_folders_wanted_selectable ; myprint( "Host1: Checking wanted folders are selectable. Use --nocheckselectable to avoid this check.\n" ) ; foreach my $folder ( @h1_folders_wanted ) { - ( $debug or $sync->{debugfolders} ) and myprint( "Checking $folder is selectable on host1\n" ) ; - # It does an imap command LIST "" $folder and then search for no \Noselect - if ( ! $imap1->selectable( $folder ) ) { + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "Checking $folder is selectable on host1\n" ) ; + # It does an imap command LIST "" $folder and then search for no \Noselect + if ( ! $sync->{imap1}->selectable( $folder ) ) { myprint( "Host1: warning! ignoring folder $folder because it is not selectable\n" ) ; }else{ push @h1_folders_wanted_selectable, $folder ; } } @h1_folders_wanted = @h1_folders_wanted_selectable ; - ( $debug or $sync->{debugfolders} ) and myprint( 'Host1: checking folders took ', timenext( ), " s\n" ) ; + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( 'Host1: checking folders took ', timenext( ), " s\n" ) ; }else{ myprint( "Host1: Not checking that wanted folders are selectable. Remove --nocheckselectable to get this check.\n" ) ; } @@ -1619,20 +1739,9 @@ if ( $sync->{ checkselectable } ) { $sync->{h1_folders_wanted} = \@h1_folders_wanted ; -my( $h1_sep, $h2_sep ) ; -# what are the private folders separators for each server ? +# Old place of private_folders_separators_and_prefixes( ) call. 
+#private_folders_separators_and_prefixes( ) ; -( $debug or $sync->{debugfolders} ) and myprint( "Getting separators\n" ) ; -$h1_sep = get_separator( $imap1, $sep1, '--sep1', 'Host1', \@h1_folders_all ) ; -$h2_sep = get_separator( $imap2, $sep2, '--sep2', 'Host2', \@h2_folders_all ) ; - - -$sync->{ h1_prefix } = get_prefix( $imap1, $prefix1, '--prefix1', 'Host1', \@h1_folders_all ) ; -$sync->{ h2_prefix } = get_prefix( $imap2, $prefix2, '--prefix2', 'Host2', \@h2_folders_all ) ; - - -myprint( "Host1 separator and prefix: [$h1_sep][$sync->{ h1_prefix }]\n" ) ; -myprint( "Host2 separator and prefix: [$h2_sep][$sync->{ h2_prefix }]\n" ) ; # this hack is because LWP post does not pass well a hash in the $form parameter # but it does pass well an array @@ -1643,7 +1752,7 @@ automap( $sync ) ; foreach my $h1_fold ( @h1_folders_wanted ) { my $h2_fold ; - $h2_fold = imap2_folder_name( $h1_fold ) ; + $h2_fold = imap2_folder_name( $sync, $h1_fold ) ; $h2_folders_from_1_wanted{ $h2_fold }++ ; if ( 1 < $h2_folders_from_1_wanted{ $h2_fold } ) { $h2_folders_from_1_several{ $h2_fold }++ ; @@ -1653,7 +1762,7 @@ foreach my $h1_fold ( @h1_folders_wanted ) { foreach my $h1_fold ( @h1_folders_all ) { my $h2_fold ; - $h2_fold = imap2_folder_name( $h1_fold ) ; + $h2_fold = imap2_folder_name( $sync, $h1_fold ) ; $h2_folders_from_1_all{ $h2_fold }++ ; } @@ -1666,32 +1775,32 @@ All foldernames are presented between brackets like [X] where X is the foldernam When a foldername contains non-ASCII characters it is presented in the form [X] = [Y] where X is the imap foldername you have to use in command line options and -Y is the uft8 output just printed for convenience, to recognize it. +Y is the utf8 output just printed for convenience, to recognize it. END_LISTING myprint( - "Host1 folders list (first the raw imap format then the [X] = [Y]):\n", - $imap1->list( ), + "Host1: folders list (first the raw imap format then the [X] = [Y]):\n", + $sync->{imap1}->list( ), "\n", jux_utf8_list( @h1_folders_all ), "\n", - "Host2 folders list (first the raw imap format then the [X] = [Y]):\n", - $imap2->list( ), + "Host2: folders list (first the raw imap format then the [X] = [Y]):\n", + $sync->{imap2}->list( ), "\n", jux_utf8_list( @h2_folders_all ), - "\n", - q{} + "\n", + q{} ) ; if ( $subscribed ) { - myprint( + myprint( 'Host1 subscribed folders list: ', jux_utf8_list( sort keys %h1_subscribed_folder ), "\n", ) ; } - + my @h2_folders_not_in_1; @h2_folders_not_in_1 = list_folders_in_2_not_in_1( ) ; @@ -1731,15 +1840,20 @@ exit_clean( $sync, $EX_OK ) if ( $sync->{justautomap} ) ; debugsleep( $sync ) ; if ( $foldersizes ) { - foldersizes_on_h1h2( ) ; + foldersizes_on_h1h2( $sync ) ; } -exit_clean( $sync, $EX_OK ) if ( $justfoldersizes ) ; + +if ( $sync->{ justfoldersizes } ) +{ + myprint( "Exiting because of --justfoldersizes\n" ) ; + exit_clean( $sync, $EX_OK ) ; +} $sync->{stats} = 1 ; -if ( $sync->{'delete1emptyfolders'} ) { +if ( $sync->{ delete1emptyfolders } ) { delete1emptyfolders( $sync ) ; } @@ -1755,116 +1869,128 @@ $sync->{begin_transfer_time} = time ; my %uid_candidate_for_deletion ; my %uid_candidate_no_deletion ; -my %h2_folders_of_md5 = ( ) ; +$sync->{ h2_folders_of_md5 } = { } ; FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + if ( ! 
reconnect_12_if_needed( $sync ) ) { last FOLDER ; } - my $h2_fold = imap2_folder_name( $h1_fold ) ; + my $h2_fold = imap2_folder_name( $sync, $h1_fold ) ; $h1_folders_wanted_ct++ ; myprintf( "Folder %7s %-35s -> %-35s\n", "$h1_folders_wanted_ct/$h1_folders_wanted_nb", jux_utf8( $h1_fold ), jux_utf8( $h2_fold ) ) ; myprint( debugmemory( $sync, " at folder loop" ) ) ; - + # host1 can not be fetched read only, select is needed because of expunge. - select_folder( $imap1, $h1_fold, 'Host1' ) or next FOLDER ; + select_folder( $sync, $sync->{imap1}, $h1_fold, 'Host1' ) or next FOLDER ; debugsleep( $sync ) ; - my $h1_fold_nb_messages = count_from_select( $imap1->History ) ; - myprint( "Host1 folder [$h1_fold] has $h1_fold_nb_messages messages in total (mentioned by SELECT)\n" ) ; + my $h1_fold_nb_messages = count_from_select( $sync->{imap1}->History ) ; + myprint( "Host1: folder [$h1_fold] has $h1_fold_nb_messages messages in total (mentioned by SELECT)\n" ) ; if ( $skipemptyfolders and 0 == $h1_fold_nb_messages ) { - myprint( "Skipping empty host1 folder [$h1_fold]\n" ) ; + myprint( "Host1: skipping empty host1 folder [$h1_fold]\n" ) ; next FOLDER ; } + # Code added from https://github.com/imapsync/imapsync/issues/95 + # Thanks jh1995 + if ( $skipemptyfolders ) { + my $h1_msgs_all_hash_ref_tmp = { } ; + my @h1_msgs_tmp = select_msgs( $sync->{imap1}, $h1_msgs_all_hash_ref_tmp, $search1, $h1_fold ) ; + my $h1_fold_nb_messages_tmp = scalar( @h1_msgs_tmp ) ; + if ( 0 == $h1_fold_nb_messages_tmp ) { + myprint( "Host1: skipping empty host1 folder [$h1_fold] (0 message found by SEARCH)\n" ) ; + next FOLDER ; + } + } + if ( ! exists $h2_folders_all{ $h2_fold } ) { - create_folder( $imap2, $h2_fold, $h1_fold ) or next FOLDER ; + create_folder( $sync, $sync->{imap2}, $h2_fold, $h1_fold ) or next FOLDER ; } acls_sync( $h1_fold, $h2_fold ) ; # Sometimes the folder on host2 is listed (it exists) but is # not selectable but becomes selectable by a create (Gmail) - select_folder( $imap2, $h2_fold, 'Host2' ) - or ( create_folder( $imap2, $h2_fold, $h1_fold ) - and select_folder( $imap2, $h2_fold, 'Host2' ) ) + select_folder( $sync, $sync->{imap2}, $h2_fold, 'Host2' ) + or ( create_folder( $sync, $sync->{imap2}, $h2_fold, $h1_fold ) + and select_folder( $sync, $sync->{imap2}, $h2_fold, 'Host2' ) ) or next FOLDER ; - my @select_results = $imap2->Results( ) ; + my @select_results = $sync->{imap2}->Results( ) ; my $h2_fold_nb_messages = count_from_select( @select_results ) ; - myprint( "Host2 folder [$h2_fold] has $h2_fold_nb_messages messages in total (mentioned by SELECT)\n" ) ; + myprint( "Host2: folder [$h2_fold] has $h2_fold_nb_messages messages in total (mentioned by SELECT)\n" ) ; my $permanentflags2 = permanentflags( @select_results ) ; - myprint( "Host2 folder [$h2_fold] permanentflags: $permanentflags2\n" ) ; + myprint( "Host2: folder [$h2_fold] permanentflags: $permanentflags2\n" ) ; - if ( $expunge1 ){ + if ( $sync->{ expunge1 } ) + { myprint( "Host1: Expunging $h1_fold $sync->{dry_message}\n" ) ; - if ( ! $sync->{dry} ) { $imap1->expunge( ) } ; + if ( ! $sync->{dry} ) { $sync->{imap1}->expunge( ) } ; } if ( ( ( $subscribe and exists $h1_subscribed_folder{ $h1_fold } ) or $subscribeall ) - and not exists $h2_subscribed_folder{ $h2_fold } ) { + and not exists $h2_subscribed_folder{ $h2_fold } ) + { myprint( "Host2: Subscribing to folder $h2_fold\n" ) ; - if ( ! $sync->{dry} ) { $imap2->subscribe( $h2_fold ) } ; + if ( ! 
$sync->{dry} ) { $sync->{imap2}->subscribe( $h2_fold ) } ; } - next FOLDER if ( $justfolders ) ; + next FOLDER if ( $sync->{ justfolders } ) ; - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } my $h1_msgs_all_hash_ref = { } ; - my @h1_msgs = select_msgs( $imap1, $h1_msgs_all_hash_ref, $search1, $sync->{abletosearch1}, $h1_fold ); + my @h1_msgs = select_msgs( $sync->{imap1}, $h1_msgs_all_hash_ref, $search1, $sync->{abletosearch1}, $h1_fold ); - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } my $h1_msgs_nb = scalar @h1_msgs ; - $h1{ $h1_fold }{ 'messages_nb' } = $h1_msgs_nb ; - myprint( "Host1 folder [$h1_fold] considering $h1_msgs_nb messages\n" ) ; - ( $debug or $debuglist ) and myprint( "Host1 folder [$h1_fold] considering $h1_msgs_nb messages, LIST gives: @h1_msgs\n" ) ; - $debug and myprint( "Host1 selecting messages of folder [$h1_fold] took ", timenext(), " s\n" ) ; + myprint( "Host1: folder [$h1_fold] considering $h1_msgs_nb messages\n" ) ; + ( $sync->{ debug } or $debuglist ) and myprint( "Host1: folder [$h1_fold] considering $h1_msgs_nb messages, LIST gives: @h1_msgs\n" ) ; + $sync->{ debug } and myprint( "Host1: selecting messages of folder [$h1_fold] took ", timenext(), " s\n" ) ; my $h2_msgs_all_hash_ref = { } ; - my @h2_msgs = select_msgs( $imap2, $h2_msgs_all_hash_ref, $search2, $sync->{abletosearch2}, $h2_fold ) ; + my @h2_msgs = select_msgs( $sync->{imap2}, $h2_msgs_all_hash_ref, $search2, $sync->{abletosearch2}, $h2_fold ) ; - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } my $h2_msgs_nb = scalar @h2_msgs ; - $h2{ $h2_fold }{ 'messages_nb' } = $h2_msgs_nb ; - myprint( "Host2 folder [$h2_fold] considering $h2_msgs_nb messages\n" ) ; - ( $debug or $debuglist ) and myprint( "Host2 folder [$h2_fold] considering $h2_msgs_nb messages, LIST gives: @h2_msgs\n" ) ; - $debug and myprint( "Host2 selecting messages of folder [$h2_fold] took ", timenext(), " s\n" ) ; + myprint( "Host2: folder [$h2_fold] considering $h2_msgs_nb messages\n" ) ; + ( $sync->{ debug } or $debuglist ) and myprint( "Host2: folder [$h2_fold] considering $h2_msgs_nb messages, LIST gives: @h2_msgs\n" ) ; + $sync->{ debug } and myprint( "Host2: selecting messages of folder [$h2_fold] took ", timenext(), " s\n" ) ; my $cache_base = "$sync->{ tmpdir }/imapsync_cache/" ; my $cache_dir = cache_folder( $cache_base, "$sync->{host1}/$sync->{user1}/$sync->{host2}/$sync->{user2}", $h1_fold, $h2_fold ) ; my ( $cache_1_2_ref, $cache_2_1_ref ) = ( {}, {} ) ; - my $h1_uidvalidity = $imap1->uidvalidity( ) || q{} ; - my $h2_uidvalidity = $imap2->uidvalidity( ) || q{} ; + my $h1_uidvalidity = $sync->{imap1}->uidvalidity( ) || q{} ; + my $h2_uidvalidity = $sync->{imap2}->uidvalidity( ) || q{} ; - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } if ( $usecache ) { - myprint( "cache directory: $cache_dir\n" ) ; + myprint( "Local cache directory: $cache_dir ( " . length( $cache_dir ) . 
" characters long )\n" ) ; mkpath( "$cache_dir" ) ; ( $cache_1_2_ref, $cache_2_1_ref ) = get_cache( $cache_dir, \@h1_msgs, \@h2_msgs, $h1_msgs_all_hash_ref, $h2_msgs_all_hash_ref ) ; myprint( 'CACHE h1 h2: ', scalar keys %{ $cache_1_2_ref } , " files\n" ) ; - $debug and myprint( '[', + $sync->{ debug } and myprint( '[', map ( { "$_->$cache_1_2_ref->{$_} " } keys %{ $cache_1_2_ref } ), " ]\n" ) ; } - my %h1_hash = () ; - my %h2_hash = () ; + my %h1_hash = ( ) ; + my %h2_hash = ( ) ; my ( %h1_msgs, %h2_msgs ) ; - @h1_msgs{ @h1_msgs } = (); - @h2_msgs{ @h2_msgs } = (); + @h1_msgs{ @h1_msgs } = ( ) ; + @h2_msgs{ @h2_msgs } = ( ) ; my @h1_msgs_in_cache = sort { $a <=> $b } keys %{ $cache_1_2_ref } ; my @h2_msgs_in_cache = keys %{ $cache_2_1_ref } ; @@ -1892,112 +2018,128 @@ FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { #myprint( "delete2: @h2_msgs_delete2_not_in_cache\n" ) ; } - $debug and myprint( "Host1 parsing headers of folder [$h1_fold]\n" ) ; + $sync->{ debug } and myprint( "Host1: parsing headers of folder [$h1_fold]\n" ) ; my ($h1_heads_ref, $h1_fir_ref) = ({}, {}); - $h1_heads_ref = $imap1->parse_headers([@h1_msgs_not_in_cache], @useheader) if (@h1_msgs_not_in_cache); - $debug and myprint( "Host1 parsing headers of folder [$h1_fold] took ", timenext(), " s\n" ) ; + $h1_heads_ref = $sync->{imap1}->parse_headers([@h1_msgs_not_in_cache], @useheader) if (@h1_msgs_not_in_cache); + $sync->{ debug } and myprint( "Host1: parsing headers of folder [$h1_fold] took ", timenext(), " s\n" ) ; @{ $h1_fir_ref }{@h1_msgs} = ( undef ) ; - $debug and myprint( "Host1 getting flags idate and sizes of folder [$h1_fold]\n" ) ; - if ( $sync->{abletosearch1} ) { - $h1_fir_ref = $imap1->fetch_hash( \@h1_msgs, 'FLAGS', 'INTERNALDATE', 'RFC822.SIZE', $h1_fir_ref ) - if ( @h1_msgs ) ; - }else{ - my $uidnext = $imap1->uidnext( $h1_fold ) || $uidnext_default ; - my $fetch_hash_uids = $fetch_hash_set || "1:$uidnext" ; - $h1_fir_ref = $imap1->fetch_hash( $fetch_hash_uids, 'FLAGS', 'INTERNALDATE', 'RFC822.SIZE', $h1_fir_ref ) + $sync->{ debug } and myprint( "Host1: getting flags idate and sizes of folder [$h1_fold]\n" ) ; + + my @h1_common_fetch_param = ( 'FLAGS', 'INTERNALDATE', 'RFC822.SIZE' ) ; + if ( $sync->{ synclabels } or $sync->{ resynclabels } ) { push @h1_common_fetch_param, 'X-GM-LABELS' ; } + + if ( $sync->{abletosearch1} ) + { + $h1_fir_ref = $sync->{imap1}->fetch_hash( \@h1_msgs, @h1_common_fetch_param, $h1_fir_ref ) if ( @h1_msgs ) ; } - $debug and myprint( "Host1 getting flags idate and sizes of folder [$h1_fold] took ", timenext(), " s\n" ) ; - if ( ! $h1_fir_ref ) { - my $error = join( q{}, "Host1 folder $h1_fold: Could not fetch_hash ", - scalar @h1_msgs, ' msgs: ', $imap1->LastError || q{}, "\n" ) ; + else + { + my $uidnext = $sync->{imap1}->uidnext( $h1_fold ) || $uidnext_default ; + my $fetch_hash_uids = $fetch_hash_set || "1:$uidnext" ; + $h1_fir_ref = $sync->{imap1}->fetch_hash( $fetch_hash_uids, @h1_common_fetch_param, $h1_fir_ref ) + if ( @h1_msgs ) ; + } + + $sync->{ debug } and myprint( "Host1: getting flags idate and sizes of folder [$h1_fold] took ", timenext(), " s\n" ) ; + if ( ! $h1_fir_ref ) + { + my $error = join( q{}, "Host1: folder $h1_fold : Could not fetch_hash ", + scalar @h1_msgs, ' msgs: ', $sync->{imap1}->LastError || q{}, "\n" ) ; errors_incr( $sync, $error ) ; next FOLDER ; } my @h1_msgs_duplicate; - foreach my $m (@h1_msgs_not_in_cache) { - my $rc = parse_header_msg( $sync, $imap1, $m, $h1_heads_ref, $h1_fir_ref, 'Host1', \%h1_hash ) ; - if ( ! 
defined $rc ) { + foreach my $m ( @h1_msgs_not_in_cache ) + { + my $rc = parse_header_msg( $sync, $sync->{imap1}, $m, $h1_heads_ref, $h1_fir_ref, 'Host1', \%h1_hash ) ; + if ( ! defined $rc ) + { my $h1_size = $h1_fir_ref->{$m}->{'RFC822.SIZE'} || 0; - myprint( "Host1 $h1_fold/$m size $h1_size ignored (no wanted headers so we ignore this message. To solve this: use --addheader)\n" ) ; - $total_bytes_skipped += $h1_size; - $nb_msg_skipped += 1; - $h1_nb_msg_noheader +=1; - $h1_nb_msg_processed +=1 ; - } elsif(0 == $rc) { + myprint( "Host1: $h1_fold/$m size $h1_size ignored (no wanted headers so we ignore this message. To solve this: use --addheader)\n" ) ; + $sync->{ total_bytes_skipped } += $h1_size ; + $sync->{ nb_msg_skipped } += 1 ; + $sync->{ h1_nb_msg_noheader } +=1 ; + $sync->{ h1_nb_msg_processed } +=1 ; + } elsif(0 == $rc) + { # duplicate push @h1_msgs_duplicate, $m; # duplicate, same id same size? my $h1_size = $h1_fir_ref->{$m}->{'RFC822.SIZE'} || 0; - $nb_msg_skipped += 1; - $h1_total_bytes_duplicate += $h1_size; + $sync->{ nb_msg_skipped } += 1; $h1_nb_msg_duplicate += 1; - $h1_nb_msg_processed +=1 ; + $sync->{ h1_nb_msg_processed } +=1 ; } } my $h1_msgs_duplicate_nb = scalar @h1_msgs_duplicate ; - $h1{ $h1_fold }{ 'duplicates_nb' } = $h1_msgs_duplicate_nb ; - $debug and myprint( "Host1 selected: $h1_msgs_nb duplicates: $h1_msgs_duplicate_nb\n" ) ; - $debug and myprint( 'Host1 whole time parsing headers took ', timenext(), " s\n" ) ; - # Getting headers and metada can be so long that host2 might be disconnected here + myprint( "Host1: folder [$h1_fold] selected $h1_msgs_nb messages, duplicates $h1_msgs_duplicate_nb\n" ) ; + + $sync->{ debug } and myprint( 'Host1: whole time parsing headers took ', timenext(), " s\n" ) ; + # Getting headers and metada can be so long that host2 might be disconnected here if ( ! 
reconnect_12_if_needed( $sync ) ) { last FOLDER ; } - $debug and myprint( "Host2 parsing headers of folder [$h2_fold]\n" ) ; + $sync->{ debug } and myprint( "Host2: parsing headers of folder [$h2_fold]\n" ) ; my ($h2_heads_ref, $h2_fir_ref) = ( {}, {} ); - $h2_heads_ref = $imap2->parse_headers([@h2_msgs_not_in_cache], @useheader) if (@h2_msgs_not_in_cache); - $debug and myprint( "Host2 parsing headers of folder [$h2_fold] took ", timenext(), " s\n" ) ; + $h2_heads_ref = $sync->{imap2}->parse_headers([@h2_msgs_not_in_cache], @useheader) if (@h2_msgs_not_in_cache); + $sync->{ debug } and myprint( "Host2: parsing headers of folder [$h2_fold] took ", timenext(), " s\n" ) ; - $debug and myprint( "Host2 getting flags idate and sizes of folder [$h2_fold]\n" ) ; + $sync->{ debug } and myprint( "Host2: getting flags idate and sizes of folder [$h2_fold]\n" ) ; @{ $h2_fir_ref }{@h2_msgs} = ( ); # fetch_hash can select by uid with last arg as ref + my @h2_common_fetch_param = ( 'FLAGS', 'INTERNALDATE', 'RFC822.SIZE' ) ; + if ( $sync->{ synclabels } or $sync->{ resynclabels } ) { push @h2_common_fetch_param, 'X-GM-LABELS' ; } + if ( $sync->{abletosearch2} and scalar( @h2_msgs ) ) { - $h2_fir_ref = $imap2->fetch_hash( \@h2_msgs, 'FLAGS', 'INTERNALDATE', 'RFC822.SIZE', $h2_fir_ref) ; + $h2_fir_ref = $sync->{imap2}->fetch_hash( \@h2_msgs, @h2_common_fetch_param, $h2_fir_ref) ; }else{ - my $uidnext = $imap2->uidnext( $h2_fold ) || $uidnext_default ; + my $uidnext = $sync->{imap2}->uidnext( $h2_fold ) || $uidnext_default ; my $fetch_hash_uids = $fetch_hash_set || "1:$uidnext" ; - $h2_fir_ref = $imap2->fetch_hash( $fetch_hash_uids, 'FLAGS', 'INTERNALDATE', 'RFC822.SIZE', $h2_fir_ref ) + $h2_fir_ref = $sync->{imap2}->fetch_hash( $fetch_hash_uids, @h2_common_fetch_param, $h2_fir_ref ) if ( @h2_msgs ) ; } - $debug and myprint( "Host2 getting flags idate and sizes of folder [$h2_fold] took ", timenext(), " s\n" ) ; + $sync->{ debug } and myprint( "Host2: getting flags idate and sizes of folder [$h2_fold] took ", timenext(), " s\n" ) ; my @h2_msgs_duplicate; foreach my $m (@h2_msgs_not_in_cache) { - my $rc = parse_header_msg( $sync, $imap2, $m, $h2_heads_ref, $h2_fir_ref, 'Host2', \%h2_hash ) ; + my $rc = parse_header_msg( $sync, $sync->{imap2}, $m, $h2_heads_ref, $h2_fir_ref, 'Host2', \%h2_hash ) ; my $h2_size = $h2_fir_ref->{$m}->{'RFC822.SIZE'} || 0 ; if (! 
defined $rc ) { - myprint( "Host2 $h2_fold/$m size $h2_size ignored (no wanted headers so we ignore this message)\n" ) ; + myprint( "Host2: $h2_fold/$m size $h2_size ignored (no wanted headers so we ignore this message)\n" ) ; $h2_nb_msg_noheader += 1 ; } elsif( 0 == $rc ) { # duplicate $h2_nb_msg_duplicate += 1 ; - $h2_total_bytes_duplicate += $h2_size ; push @h2_msgs_duplicate, $m ; } } # %h2_folders_of_md5 foreach my $md5 ( keys %h2_hash ) { - $h2_folders_of_md5{ $md5 }->{ $h2_fold } ++ ; + $sync->{ h2_folders_of_md5 }->{ $md5 }->{ $h2_fold } ++ ; + } + # %h1_folders_of_md5 + foreach my $md5 ( keys %h1_hash ) { + $sync->{ h1_folders_of_md5 }->{ $md5 }->{ $h2_fold } ++ ; } my $h2_msgs_duplicate_nb = scalar @h2_msgs_duplicate ; - $h2{ $h2_fold }{ 'duplicates_nb' } = $h2_msgs_duplicate_nb ; - myprint( "Host2 folder $h2_fold selected: $h2_msgs_nb messages, duplicates: $h2_msgs_duplicate_nb\n" ) - if ( $debug or $delete2duplicates or $h2_msgs_duplicate_nb ) ; - $debug and myprint( 'Host2 whole time parsing headers took ', timenext( ), " s\n" ) ; + myprint( "Host2: folder [$h2_fold] selected $h2_msgs_nb messages, duplicates $h2_msgs_duplicate_nb\n" ) ; - $debug and myprint( "++++ Verifying [$h1_fold] -> [$h2_fold]\n" ) ; + $sync->{ debug } and myprint( 'Host2 whole time parsing headers took ', timenext( ), " s\n" ) ; + + $sync->{ debug } and myprint( "++++ Verifying [$h1_fold] -> [$h2_fold]\n" ) ; # messages in host1 that are not in host2 my @h1_hash_keys_sorted_by_uid @@ -2008,30 +2150,31 @@ FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { my @h2_hash_keys_sorted_by_uid = sort {$h2_hash{$a}{'m'} <=> $h2_hash{$b}{'m'}} keys %h2_hash; + # Deletions on account2. - if( $delete2duplicates and not exists $h2_folders_from_1_several{ $h2_fold } ) { + if( $sync->{ delete2duplicates } and not exists $h2_folders_from_1_several{ $h2_fold } ) { my @h2_expunge ; foreach my $h2_msg ( @h2_msgs_duplicate ) { - myprint( "msg $h2_fold/$h2_msg marked \\Deleted [duplicate] on host2 $sync->{dry_message}\n" ) ; - push @h2_expunge, $h2_msg if $uidexpunge2 ; + myprint( "Host2: msg $h2_fold/$h2_msg marked \\Deleted [duplicate] on host2 $sync->{dry_message}\n" ) ; + push @h2_expunge, $h2_msg if $sync->{ uidexpunge2 } ; if ( ! $sync->{dry} ) { - $imap2->delete_message( $h2_msg ) ; + $sync->{imap2}->delete_message( $h2_msg ) ; $h2_nb_msg_deleted += 1 ; } } my $cnt = scalar @h2_expunge ; - if( @h2_expunge and not $expunge2 ) { + if( @h2_expunge and not $sync->{ expunge2 } ) { myprint( "Host2: UidExpunging $cnt message(s) in folder $h2_fold $sync->{dry_message}\n" ) ; - $imap2->uidexpunge( \@h2_expunge ) if ! $sync->{dry} ; + $sync->{imap2}->uidexpunge( \@h2_expunge ) if ! $sync->{dry} ; } - if ( $expunge2 ){ + if ( $sync->{ expunge2 } ){ myprint( "Host2: Expunging folder $h2_fold $sync->{dry_message}\n" ) ; - $imap2->expunge( ) if ! $sync->{dry} ; + $sync->{imap2}->expunge( ) if ! $sync->{dry} ; } } - if( $delete2 and not exists $h2_folders_from_1_several{ $h2_fold } ) { + if( $sync->{ delete2 } and not exists $h2_folders_from_1_several{ $h2_fold } ) { # No host1 folders f1a f1b ... going all to same f2 (via --regextrans2) my @h2_expunge; foreach my $m_id (@h2_hash_keys_sorted_by_uid) { @@ -2042,35 +2185,35 @@ FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { my $isdel = $h2_flags =~ /\B\\Deleted\b/x ? 1 : 0; myprint( "Host2: msg $h2_fold/$h2_msg marked \\Deleted on host2 [$m_id] $sync->{dry_message}\n" ) if ! 
$isdel; - push @h2_expunge, $h2_msg if $uidexpunge2; + push @h2_expunge, $h2_msg if $sync->{ uidexpunge2 }; if ( ! ( $sync->{dry} or $isdel ) ) { - $imap2->delete_message($h2_msg); + $sync->{imap2}->delete_message($h2_msg); $h2_nb_msg_deleted += 1; } } } foreach my $h2_msg ( @h2_msgs_delete2_not_in_cache ) { myprint( "Host2: msg $h2_fold/$h2_msg marked \\Deleted [not in cache] on host2 $sync->{dry_message}\n" ) ; - push @h2_expunge, $h2_msg if $uidexpunge2; + push @h2_expunge, $h2_msg if $sync->{ uidexpunge2 }; if ( ! $sync->{dry} ) { - $imap2->delete_message($h2_msg); + $sync->{imap2}->delete_message($h2_msg); $h2_nb_msg_deleted += 1; } } my $cnt = scalar @h2_expunge ; - if( @h2_expunge and not $expunge2 ) { + if( @h2_expunge and not $sync->{ expunge2 } ) { myprint( "Host2: UidExpunging $cnt message(s) in folder $h2_fold $sync->{dry_message}\n" ) ; - $imap2->uidexpunge( \@h2_expunge ) if ! $sync->{dry} ; + $sync->{imap2}->uidexpunge( \@h2_expunge ) if ! $sync->{dry} ; } - if ( $expunge2 ) { + if ( $sync->{ expunge2 } ) { myprint( "Host2: Expunging folder $h2_fold $sync->{dry_message}\n" ) ; - $imap2->expunge( ) if ! $sync->{dry} ; + $sync->{imap2}->expunge( ) if ! $sync->{dry} ; } } - if( $delete2 and exists $h2_folders_from_1_several{ $h2_fold } ) { - myprint( "Host2 folder $h2_fold $h2_folders_from_1_several{ $h2_fold } folders left to sync there\n" ) ; + if( $sync->{ delete2 } and exists $h2_folders_from_1_several{ $h2_fold } ) { + myprint( "Host2: folder $h2_fold $h2_folders_from_1_several{ $h2_fold } folders left to sync there\n" ) ; my @h2_expunge; foreach my $m_id ( @h2_hash_keys_sorted_by_uid ) { my $h2_msg = $h2_hash{ $m_id }{ 'm' } ; @@ -2078,11 +2221,11 @@ FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { my $h2_flags = $h2_hash{ $m_id }{ 'F' } || q{} ; my $isdel = $h2_flags =~ /\B\\Deleted\b/x ? 1 : 0 ; if ( ! $isdel ) { - $debug and myprint( "Host2: msg $h2_fold/$h2_msg candidate for deletion [$m_id]\n" ) ; + $sync->{ debug } and myprint( "Host2: msg $h2_fold/$h2_msg candidate for deletion [$m_id]\n" ) ; $uid_candidate_for_deletion{ $h2_fold }{ $h2_msg }++ ; } }else{ - $debug and myprint( "Host2: msg $h2_fold/$h2_msg will cancel deletion [$m_id]\n" ) ; + $sync->{ debug } and myprint( "Host2: msg $h2_fold/$h2_msg will cancel deletion [$m_id]\n" ) ; $uid_candidate_no_deletion{ $h2_fold }{ $h2_msg }++ ; } } @@ -2101,14 +2244,14 @@ FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { # last host1 folder going to $h2_fold myprint( "Last host1 folder going to $h2_fold\n" ) ; foreach my $h2_msg ( keys %{ $uid_candidate_for_deletion{ $h2_fold } } ) { - $debug and myprint( "Host2: msg $h2_fold/$h2_msg candidate for deletion\n" ) ; + $sync->{ debug } and myprint( "Host2: msg $h2_fold/$h2_msg candidate for deletion\n" ) ; if ( exists $uid_candidate_no_deletion{ $h2_fold }{ $h2_msg } ) { - $debug and myprint( "Host2: msg $h2_fold/$h2_msg canceled deletion\n" ) ; + $sync->{ debug } and myprint( "Host2: msg $h2_fold/$h2_msg canceled deletion\n" ) ; }else{ myprint( "Host2: msg $h2_fold/$h2_msg marked \\Deleted $sync->{dry_message}\n" ) ; - push @h2_expunge, $h2_msg if $uidexpunge2 ; + push @h2_expunge, $h2_msg if $sync->{ uidexpunge2 } ; if ( ! 
$sync->{dry} ) { - $imap2->delete_message( $h2_msg ) ; + $sync->{imap2}->delete_message( $h2_msg ) ; $h2_nb_msg_deleted += 1 ; } } @@ -2116,133 +2259,177 @@ FOLDER: foreach my $h1_fold ( @h1_folders_wanted ) { } my $cnt = scalar @h2_expunge ; - if( @h2_expunge and not $expunge2 ) { + if( @h2_expunge and not $sync->{ expunge2 } ) { myprint( "Host2: UidExpunging $cnt message(s) in folder $h2_fold $sync->{dry_message}\n" ) ; - $imap2->uidexpunge( \@h2_expunge ) if ! $sync->{dry} ; + $sync->{imap2}->uidexpunge( \@h2_expunge ) if ! $sync->{dry} ; } - if ( $expunge2 ) { + if ( $sync->{ expunge2 } ) { myprint( "Host2: Expunging host2 folder $h2_fold $sync->{dry_message}\n" ) ; - $imap2->expunge( ) if ! $sync->{dry} ; + $sync->{imap2}->expunge( ) if ! $sync->{dry} ; } $h2_folders_from_1_several{ $h2_fold }-- ; } - my $h2_uidnext = $imap2->uidnext( $h2_fold ) ; - $debug and myprint( "Host2 uidnext: $h2_uidnext\n" ) ; + my $h2_uidnext = $sync->{imap2}->uidnext( $h2_fold ) ; + $sync->{ debug } and myprint( "Host2: uidnext is $h2_uidnext\n" ) ; $h2_uidguess = $h2_uidnext ; # Getting host2 headers, metada and delete2 stuff can be so long that host1 might be disconnected here if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + my @h1_msgs_to_delete ; MESS: foreach my $m_id (@h1_hash_keys_sorted_by_uid) { if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } - #myprint( "h1_nb_msg_processed: $h1_nb_msg_processed\n" ) ; + #myprint( "h1_nb_msg_processed: $sync->{ h1_nb_msg_processed }\n" ) ; my $h1_size = $h1_hash{$m_id}{'s'}; my $h1_msg = $h1_hash{$m_id}{'m'}; my $h1_idate = $h1_hash{$m_id}{'D'}; + #my $labels = labels( $sync->{imap1}, $h1_msg ) ; + #print "LABELS: $labels\n" ; + if ( ( not exists $h2_hash{ $m_id } ) - and ( not ( exists $h2_folders_of_md5{ $m_id } ) - or not $skipcrossduplicates ) ) { + and ( not ( exists $sync->{ h2_folders_of_md5 }->{ $m_id } ) + or not $skipcrossduplicates ) ) + { # copy my $h2_msg = copy_message( $sync, $h1_msg, $h1_fold, $h2_fold, $h1_fir_ref, $permanentflags2, $cache_dir ) ; - $h2_folders_of_md5{ $m_id }->{ $h2_fold } ++ ; - if( $delete2 and ( exists $h2_folders_from_1_several{ $h2_fold } ) and $h2_msg ) { + if ( $h2_msg and $sync->{ delete1 } and not $sync->{ expungeaftereach } ) { + # not expunged + push @h1_msgs_to_delete, $h1_msg ; + } + + # A bug here with imapsync 1.920, fixed in 1.921 + # Added $h2_msg in the condition. Errors of APPEND were not counted as missing messages on host2! + if ( $h2_msg and not $sync->{ dry } ) + { + $sync->{ h2_folders_of_md5 }->{ $m_id }->{ $h2_fold } ++ ; + } + + # + if( $sync->{ delete2 } and ( exists $h2_folders_from_1_several{ $h2_fold } ) and $h2_msg ) { myprint( "Host2: msg $h2_fold/$h2_msg will cancel deletion [fresh copy] on host2\n" ) ; $uid_candidate_no_deletion{ $h2_fold }{ $h2_msg }++ ; } - last FOLDER if total_bytes_max_reached( ) ; + + if ( total_bytes_max_reached( $sync ) ) { + # a bug when using --delete1 --noexpungeaftereach + # same thing below on all total_bytes_max_reached! 
+ last FOLDER ; + } next MESS; } - else{ + else + { # already on host2 - if ( exists $h2_hash{ $m_id } ) { + if ( exists $h2_hash{ $m_id } ) + { my $h2_msg = $h2_hash{$m_id}{'m'} ; - $debug and myprint( "Host1 found msg $h1_fold/$h1_msg equals Host2 $h2_fold/$h2_msg\n" ) ; - if ( $usecache ) { + $sync->{ debug } and myprint( "Host1: found that msg $h1_fold/$h1_msg equals Host2 $h2_fold/$h2_msg\n" ) ; + if ( $usecache ) + { $debugcache and myprint( "touch $cache_dir/${h1_msg}_$h2_msg\n" ) ; touch( "$cache_dir/${h1_msg}_$h2_msg" ) or croak( "Couldn't touch $cache_dir/${h1_msg}_$h2_msg" ) ; } - }elsif( exists $h2_folders_of_md5{ $m_id } ) { - my @folders_dup = keys %{ $h2_folders_of_md5{ $m_id } } ; - ( $debug or $debugcrossduplicates ) and myprint( "Host1 found msg $h1_fold/$h1_msg is also in Host2 folders @folders_dup\n" ) ; } - $total_bytes_skipped += $h1_size ; - $nb_msg_skipped += 1 ; - $h1_nb_msg_processed +=1 ; + elsif( exists $sync->{ h2_folders_of_md5 }->{ $m_id } ) + { + my @folders_dup = keys %{ $sync->{ h2_folders_of_md5 }->{ $m_id } } ; + ( $sync->{ debug } or $debugcrossduplicates ) and myprint( "Host1: found that msg $h1_fold/$h1_msg is also in Host2 folders @folders_dup\n" ) ; + $sync->{ h2_nb_msg_crossdup } +=1 ; + } + $sync->{ total_bytes_skipped } += $h1_size ; + $sync->{ nb_msg_skipped } += 1 ; + $sync->{ h1_nb_msg_processed } +=1 ; } if ( exists $h2_hash{ $m_id } ) { #$debug and myprint( "MESSAGE $m_id\n" ) ; my $h2_msg = $h2_hash{$m_id}{'m'}; if ( $sync->{resyncflags} ) { - sync_flags_fir( $h1_fold, $h1_msg, $h2_fold, $h2_msg, $permanentflags2, $h1_fir_ref, $h2_fir_ref ) ; + sync_flags_fir( $sync, $h1_fold, $h1_msg, $h2_fold, $h2_msg, $permanentflags2, $h1_fir_ref, $h2_fir_ref ) ; } # Good my $h2_size = $h2_hash{$m_id}{'s'}; - $debug and myprint( - "Host1 size msg $h1_fold/$h1_msg = $h1_size <> $h2_size = Host2 $h2_fold/$h2_msg\n" ) ; + $sync->{ debug } and myprint( + "Host1: size msg $h1_fold/$h1_msg = $h1_size <> $h2_size = Host2 $h2_fold/$h2_msg\n" ) ; + + if ( $sync->{ resynclabels } ) + { + resynclabels( $sync, $h1_msg, $h2_msg, $h1_fir_ref, $h2_fir_ref, $h1_fold ) + } } if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } - - if ( $delete1 ) { - delete_message_on_host1( $h1_msg, $h1_fold ) ; + if ( $sync->{ delete1 } ) { + push @h1_msgs_to_delete, $h1_msg ; } } # END MESS: loop - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } - MESS_IN_CACHE: foreach my $h1_msg ( @h1_msgs_in_cache ) { - my $h2_msg = $cache_1_2_ref->{ $h1_msg } ; - $debugcache and myprint( "cache messages update flags $h1_msg->$h2_msg\n" ) ; - if ( $sync->{resyncflags} ) { - sync_flags_fir( $h1_fold, $h1_msg, $h2_fold, $h2_msg, $permanentflags2, $h1_fir_ref, $h2_fir_ref ) ; - } - my $h1_size = $h1_fir_ref->{ $h1_msg }->{ 'RFC822.SIZE' } || 0 ; - $total_bytes_skipped += $h1_size; - $nb_msg_skipped += 1; - $h1_nb_msg_processed +=1 ; - if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + delete_message_on_host1( $sync, $h1_fold, $sync->{ expunge1 }, @h1_msgs_to_delete, @h1_msgs_in_cache ) ; + + if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + + # MESS_IN_CACHE: + if ( ! 
$sync->{ delete1 } ) + { + foreach my $h1_msg ( @h1_msgs_in_cache ) + { + my $h2_msg = $cache_1_2_ref->{ $h1_msg } ; + $debugcache and myprint( "cache messages update flags $h1_msg->$h2_msg\n" ) ; + if ( $sync->{resyncflags} ) + { + sync_flags_fir( $sync, $h1_fold, $h1_msg, $h2_fold, $h2_msg, $permanentflags2, $h1_fir_ref, $h2_fir_ref ) ; + } + my $h1_size = $h1_fir_ref->{ $h1_msg }->{ 'RFC822.SIZE' } || 0 ; + $sync->{ total_bytes_skipped } += $h1_size; + $sync->{ nb_msg_skipped } += 1; + $sync->{ h1_nb_msg_processed } +=1 ; + } } + if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } + + @h1_msgs_to_delete = ( ) ; #myprint( "Messages by uid: ", map { "$_ " } keys %h1_msgs_copy_by_uid, "\n" ) ; - MESS_BY_UID: foreach my $h1_msg ( sort { $a <=> $b } keys %h1_msgs_copy_by_uid ) { - # - $debug and myprint( "Copy by uid $h1_fold/$h1_msg\n" ) ; + # MESS_BY_UID: + foreach my $h1_msg ( sort { $a <=> $b } keys %h1_msgs_copy_by_uid ) + { + $sync->{ debug } and myprint( "Copy by uid $h1_fold/$h1_msg\n" ) ; if ( ! reconnect_12_if_needed( $sync ) ) { last FOLDER ; } my $h2_msg = copy_message( $sync, $h1_msg, $h1_fold, $h2_fold, $h1_fir_ref, $permanentflags2, $cache_dir ) ; - if( $delete2 and exists $h2_folders_from_1_several{ $h2_fold } and $h2_msg ) { + if( $sync->{ delete2 } and exists $h2_folders_from_1_several{ $h2_fold } and $h2_msg ) { myprint( "Host2: msg $h2_fold/$h2_msg will cancel deletion [fresh copy] on host2\n" ) ; $uid_candidate_no_deletion{ $h2_fold }{ $h2_msg }++ ; } - last FOLDER if total_bytes_max_reached( ) ; + last FOLDER if total_bytes_max_reached( $sync ) ; } - if ( $expunge1 ){ + if ( $sync->{ expunge1 } ){ myprint( "Host1: Expunging folder $h1_fold $sync->{dry_message}\n" ) ; - if ( ! $sync->{dry} ) { $imap1->expunge( ) } ; + if ( ! $sync->{dry} ) { $sync->{imap1}->expunge( ) } ; } - if ( $expunge2 ){ + if ( $sync->{ expunge2 } ){ myprint( "Host2: Expunging folder $h2_fold $sync->{dry_message}\n" ) ; - if ( ! $sync->{dry} ) { $imap2->expunge( ) } ; + if ( ! $sync->{dry} ) { $sync->{imap2}->expunge( ) } ; } - $debug and myprint( 'Time: ', timenext( ), " s\n" ) ; + $sync->{ debug } and myprint( 'Time: ', timenext( ), " s\n" ) ; } myprint( "++++ End looping on each folder\n" ) ; -if ( $delete1 and $sync->{'delete1emptyfolders'} ) { +if ( $sync->{ delete1 } and $sync->{ delete1emptyfolders } ) { delete1emptyfolders( $sync ) ; } -( $debug or $sync->{debugfolders} ) and myprint( 'Time: ', timenext( ), " s\n" ) ; +( $sync->{ debug } or $sync->{debugfolders} ) and myprint( 'Time: ', timenext( ), " s\n" ) ; if ( $foldersizesatend ) { @@ -2252,11 +2439,11 @@ Folders sizes after the synchronization. You can remove this foldersizes listing by using "--nofoldersizesatend" END_SIZE - foldersizesatend( ) ; + foldersizesatend( $sync ) ; } -if ( ! lost_connection( $imap1, "for host1 [$sync->{host1}]" ) ) { $imap1->logout( ) ; } -if ( ! lost_connection( $imap2, "for host2 [$sync->{host2}]" ) ) { $imap2->logout( ) ; } +if ( ! lost_connection( $sync, $sync->{imap1}, "for host1 [$sync->{host1}]" ) ) { $sync->{imap1}->logout( ) ; } +if ( ! 
lost_connection( $sync, $sync->{imap2}, "for host2 [$sync->{host2}]" ) ) { $sync->{imap2}->logout( ) ; } stats( $sync ) ; myprint( errorsdump( $sync->{nb_errors}, errors_log( $sync ) ) ) if ( $sync->{errorsdump} ) ; @@ -2270,23 +2457,27 @@ exit_clean( $sync, $EX_OK ) ; # subroutines -sub myprint { +sub myprint +{ #print @ARG ; print { $sync->{ tee } || \*STDOUT } @ARG ; return ; } -sub myprintf { +sub myprintf +{ printf { $sync->{ tee } || \*STDOUT } @ARG ; return ; } -sub mysprintf { +sub mysprintf +{ my( $format, @list ) = @ARG ; return sprintf $format, @list ; } -sub output_start { +sub output_start +{ my $mysync = shift @ARG ; if ( not $mysync ) { return ; } @@ -2297,7 +2488,8 @@ sub output_start { } -sub tests_output_start { +sub tests_output_start +{ note( 'Entering tests_output_start()' ) ; my $mysync = { } ; @@ -2313,7 +2505,8 @@ sub tests_output_start { return ; } -sub tests_output { +sub tests_output +{ note( 'Entering tests_output()' ) ; my $mysync = { } ; @@ -2329,7 +2522,8 @@ sub tests_output { return ; } -sub output { +sub output +{ my $mysync = shift @ARG ; if ( not $mysync ) { return ; } @@ -2341,7 +2535,8 @@ sub output { -sub tests_output_reset_with { +sub tests_output_reset_with +{ note( 'Entering tests_output_reset_with()' ) ; my $mysync = { } ; @@ -2356,7 +2551,8 @@ sub tests_output_reset_with { return ; } -sub output_reset_with { +sub output_reset_with +{ my $mysync = shift @ARG ; if ( not $mysync ) { return ; } @@ -2368,13 +2564,14 @@ sub output_reset_with { -sub abort { +sub abort +{ my $mysync = shift @ARG ; - if ( ! -r $sync->{pidfile} ) { - myprint( "Can not read pidfile $sync->{pidfile}. Exiting.\n" ) ; + if ( ! -r $mysync->{pidfile} ) { + myprint( "Can not read pidfile $mysync->{pidfile}. Exiting.\n" ) ; exit $EX_OK ; } - my $pidtokill = firstline( $sync->{pidfile} ) ; + my $pidtokill = firstline( $mysync->{pidfile} ) ; if ( ! $pidtokill ) { myprint( "No process to abort. Exiting.\n" ) ; exit $EX_OK ; @@ -2401,14 +2598,19 @@ sub abort { if ( kill 'ZERO', $pidtokill ) { myprint( "Process PID $pidtokill still there. Can not do much. Exiting.\n" ) ; exit $EX_OK ; + }else{ + myprint( "Process PID $pidtokill ended. Exiting.\n" ) ; + exit $EX_OK ; } + # well abort job done anyway - return ; + exit $EX_OK ; } -sub docker_context { +sub docker_context +{ my $mysync = shift ; -e '/.dockerenv' || return ; myprint( "Docker context detected with /.dockerenv\n" ) ; @@ -2419,11 +2621,12 @@ sub docker_context { # In case myprint( "Changing current directory to /var/tmp/\n" ) ; chdir '/var/tmp/' ; - + return ; } -sub cgibegin { +sub cgibegin +{ my $mysync = shift ; if ( ! under_cgi_context( $mysync ) ) { return ; } require CGI ; @@ -2434,8 +2637,10 @@ sub cgibegin { return ; } -sub tests_under_cgi_context { +sub tests_under_cgi_context +{ note( 'Entering tests_under_cgi_context()' ) ; + # $ENV{SERVER_SOFTWARE} = 'under imapsync' ; do { # Not in cgi context @@ -2462,7 +2667,8 @@ sub tests_under_cgi_context { } -sub under_cgi_context { +sub under_cgi_context +{ my $mysync = shift ; # Under cgi context if ( $ENV{SERVER_SOFTWARE} ) { @@ -2472,7 +2678,8 @@ sub under_cgi_context { return ; } -sub cgibuildheader { +sub cgibuildheader +{ my $mysync = shift ; if ( ! under_cgi_context( $mysync ) ) { return ; } @@ -2509,19 +2716,24 @@ sub cgibuildheader { return ; } -sub cgiload { - my $mysync = shift ; - if ( ! under_cgi_context( $mysync ) ) { return ; } - if ( $mysync->{ abort } ) { return ; } # keep going to abort - if ( $mysync->{ loaddelay } ) { - myprint( "Server is on heavy load. 
Be back in $mysync->{ loaddelay } min. Load is $mysync->{ loadavg }\n") ; - exit_clean( $mysync, $EX_UNAVAILABLE ) ; - } - return ; +sub cgiload +{ + # Exit on heavy load in CGI context + my $mysync = shift ; + if ( ! under_cgi_context( $mysync ) ) { return ; } + if ( $mysync->{ abort } ) { return ; } # keep going to abort since some ressources will be free soon + if ( $mysync->{ loaddelay } ) + { + myprint( "Server is on heavy load. Be back in $mysync->{ loaddelay } min. Load is $mysync->{ loadavg }\n") ; + exit_clean( $mysync, $EX_UNAVAILABLE ) ; + } + return ; } -sub tests_set_umask { +sub tests_set_umask +{ note( 'Entering tests_set_umask()' ) ; + my $save_umask = umask ; my $mysync = {} ; @@ -2536,7 +2748,8 @@ sub tests_set_umask { return ; } -sub set_umask { +sub set_umask +{ my $mysync = shift ; my $previous_umask = umask_str( ) ; my $new_umask = umask_str( $UMASK_PARANO ) ; @@ -2547,8 +2760,10 @@ sub set_umask { return ; } -sub tests_umask_str { +sub tests_umask_str +{ note( 'Entering tests_umask_str()' ) ; + my $save_umask = umask ; is( umask_str( ), umask_str( ), 'umask_str: no parameters => idopotent' ) ; @@ -2580,7 +2795,8 @@ sub tests_umask_str { return ; } -sub umask_str { +sub umask_str +{ my $value = shift ; if ( defined $value ) { @@ -2591,8 +2807,10 @@ sub umask_str { return( sprintf( '%#04o', $current ) ) ; } -sub tests_umask { +sub tests_umask +{ note( 'Entering tests_umask()' ) ; + my $save_umask ; is( umask, umask, 'umask: umask is umask' ) ; is( $save_umask = umask, umask, "umask: umask is umask again + save it: $save_umask" ) ; @@ -2613,7 +2831,8 @@ sub tests_umask { return ; } -sub cgisetcontext { +sub cgisetcontext +{ my $mysync = shift ; if ( ! under_cgi_context( $mysync ) ) { return ; } @@ -2621,7 +2840,7 @@ sub cgisetcontext { set_umask( $mysync ) ; # Remove all content in unsafe evaled options - @regextrans2 = ( ) ; + @{ $mysync->{ regextrans2 } } = ( ) ; @regexflag = ( ) ; @regexmess = ( ) ; @skipmess = ( ) ; @@ -2651,15 +2870,26 @@ sub cgisetcontext { chdir $cgidir or die "Can not cd to $cgidir: $OS_ERROR\n" ; $mysync->{ tmpdir } = $cgidir ; cgioutputenvcontext( $mysync ) ; - $debug and output( $mysync, 'Current directory is ' . getcwd( ) . "\n" ) ; - $debug and output( $mysync, 'Real user id is ' . getpwuid_any_os( $REAL_USER_ID ) . " (uid $REAL_USER_ID)\n" ) ; - $debug and output( $mysync, 'Effective user id is ' . getpwuid_any_os( $EFFECTIVE_USER_ID ). " (euid $EFFECTIVE_USER_ID)\n" ) ; - # @{ $mysync->{ sigexit } } = ( 'QUIT' ) ; - # output( $mysync, "Setting the QUIT signal to exit properly\n" ) ; + $mysync->{ debug } and output( $mysync, 'Current directory is ' . getcwd( ) . "\n" ) ; + $mysync->{ debug } and output( $mysync, 'Real user id is ' . getpwuid_any_os( $REAL_USER_ID ) . " (uid $REAL_USER_ID)\n" ) ; + $mysync->{ debug } and output( $mysync, 'Effective user id is ' . getpwuid_any_os( $EFFECTIVE_USER_ID ). " (euid $EFFECTIVE_USER_ID)\n" ) ; + + $skipemptyfolders = defined $skipemptyfolders ? $skipemptyfolders : 1 ; + + # Out of memory with messages over 1 GB ? + $mysync->{ maxsize } = defined $mysync->{ maxsize } ? $mysync->{ maxsize } : 1_000_000_000 ; + + # tail -f behaviour on by default + $mysync->{ tail } = defined $mysync->{ tail } ? 
$mysync->{ tail } : 1 ; + + # not sure it's for good + @useheader = qw( Message-Id ) ; + return ; } -sub cgioutputenvcontext { +sub cgioutputenvcontext +{ my $mysync = shift @ARG ; for my $envvar ( qw( REMOTE_ADDR REMOTE_HOST HTTP_REFERER HTTP_USER_AGENT SERVER_SOFTWARE SERVER_PORT HTTP_COOKIE ) ) { @@ -2675,7 +2905,8 @@ sub cgioutputenvcontext { -sub debugsleep { +sub debugsleep +{ my $mysync = shift @ARG ; if ( defined $mysync->{debugsleep} ) { myprint( "Info: sleeping $mysync->{debugsleep}s\n" ) ; @@ -2684,48 +2915,426 @@ sub debugsleep { return ; } -sub foldersizes_on_h1h2 { +sub foldersizes_on_h1h2 +{ + my $mysync = shift ; + myprint( << 'END_SIZE' ) ; Folders sizes before the synchronization. -You can remove foldersizes listings by using "--nofoldersizes" and "--nofoldersizesatend" -but then you will also loose the ETA (Estimation Time of Arrival) given after each message copy. +You can remove foldersizes listings by using "--nofoldersizes" and "--nofoldersizesatend" +but then you will also lose the ETA (Estimation Time of Arrival) given after each message copy. END_SIZE - ( $h1_nb_msg_start, $h1_bytes_start ) = foldersizes( 'Host1', $imap1, $search1, $sync->{abletosearch1}, @h1_folders_wanted ) ; - ( $h2_nb_msg_start, $h2_bytes_start ) = foldersizes( 'Host2', $imap2, $search2, $sync->{abletosearch2}, @h2_folders_from_1_wanted ) ; + ( $h1_nb_msg_start, $h1_bytes_start ) = foldersizes( 'Host1', $mysync->{imap1}, $search1, $mysync->{abletosearch1}, @h1_folders_wanted ) ; + ( $h2_nb_msg_start, $h2_bytes_start ) = foldersizes( 'Host2', $mysync->{imap2}, $search2, $mysync->{abletosearch2}, @h2_folders_from_1_wanted ) ; if ( not all_defined( $h1_nb_msg_start, $h1_bytes_start, $h2_nb_msg_start, $h2_bytes_start ) ) { my $error = "Failure getting foldersizes, ETA and final diff will not be displayed\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; $foldersizes = 0 ; $foldersizesatend = 0 ; return ; } - my $h2_bytes_limit = $sync->{h2}->{quota_limit_bytes} || 0 ; + my $h2_bytes_limit = $mysync->{h2}->{quota_limit_bytes} || 0 ; if ( $h2_bytes_limit and ( $h2_bytes_limit < $h1_bytes_start ) ) { my $quota_percent = mysprintf( '%.0f', $NUMBER_100 * $h1_bytes_start / $h2_bytes_limit ) ; my $error = "Host2: Quota limit will be exceeded! Over $quota_percent % ( $h1_bytes_start bytes / $h2_bytes_limit bytes )\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; } return ; } -sub total_bytes_max_reached { +sub total_bytes_max_reached +{ + my $mysync = shift ; - return( 0 ) if not $exitwhenover ; - if ( $sync->{total_bytes_transferred} >= $exitwhenover ) { - myprint( "Maximum bytes transferred reached, $sync->{total_bytes_transferred} >= $exitwhenover, ending sync\n" ) ; + if ( ! 
$mysync->{ exitwhenover } ) { + return( 0 ) ; + } + if ( $mysync->{ total_bytes_transferred } >= $mysync->{ exitwhenover } ) { + myprint( "Maximum bytes transferred reached, $mysync->{total_bytes_transferred} >= $mysync->{ exitwhenover }, ending sync\n" ) ; return( 1 ) ; } + return ; +} + +sub tests_mock_capability +{ + note( 'Entering tests_mock_capability()' ) ; + + my $myimap ; + ok( $myimap = mock_capability( ), + 'mock_capability: (1) no args => a Test::MockObject' + ) ; + ok( $myimap->isa( 'Test::MockObject' ), + 'mock_capability: (2) no args => a Test::MockObject' + ) ; + + is( undef, $myimap->capability( ), + 'mock_capability: (3) no args => capability undef' + ) ; + + ok( mock_capability( $myimap ), + 'mock_capability: (1) one arg => MockObject' + ) ; + + is( undef, $myimap->capability( ), + 'mock_capability: (2) one arg OO style => capability undef' + ) ; + + ok( mock_capability( $myimap, $NUMBER_123456 ), + 'mock_capability: (1) two args 123456 => capability 123456' + ) ; + + is( $NUMBER_123456, $myimap->capability( ), + 'mock_capability: (2) two args 123456 => capability 123456' + ) ; + + ok( mock_capability( $myimap, 'ABCD' ), + 'mock_capability: (1) two args ABCD => capability ABCD' + ) ; + is( 'ABCD', $myimap->capability( ), + 'mock_capability: (2) two args ABCD => capability ABCD' + ) ; + + ok( mock_capability( $myimap, [ 'ABCD' ] ), + 'mock_capability: (1) two args [ ABCD ] => capability [ ABCD ]' + ) ; + is_deeply( [ 'ABCD' ], $myimap->capability( ), + 'mock_capability: (2) two args [ ABCD ] => capability [ ABCD ]' + ) ; + + ok( mock_capability( $myimap, [ 'ABC', 'DEF' ] ), + 'mock_capability: (1) two args [ ABC, DEF ] => capability [ ABC, DEF ]' + ) ; + is_deeply( [ 'ABC', 'DEF' ], $myimap->capability( ), + 'mock_capability: (2) two args [ ABC, DEF ] => capability capability [ ABC, DEF ]' + ) ; + + ok( mock_capability( $myimap, 'ABC', 'DEF' ), + 'mock_capability: (1) two args ABC, DEF => capability [ ABC, DEF ]' + ) ; + is_deeply( [ 'ABC', 'DEF' ], [ $myimap->capability( ) ], + 'mock_capability: (2) two args ABC, DEF => capability capability [ ABC, DEF ]' + ) ; + + ok( mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=123456' ), + 'mock_capability: (1) two args IMAP4rev1, APPENDLIMIT=123456 => capability [ IMAP4rev1, APPENDLIMIT=123456 ]' + ) ; + is_deeply( [ 'IMAP4rev1', 'APPENDLIMIT=123456' ], [ $myimap->capability( ) ], + 'mock_capability: (2) two args IMAP4rev1, APPENDLIMIT=123456 => capability capability [ IMAP4rev1, APPENDLIMIT=123456 ]' + ) ; + + note( 'Leaving tests_mock_capability()' ) ; + return ; +} + +sub sig_install_toggle_sleep +{ + my $mysync = shift ; + if ( ! 'MSWin32' eq $OSNAME ) { + sig_install( $mysync, \&toggle_sleep, 'USR1' ) + } + return ; +} + + +sub mock_capability +{ + my $myimap = shift ; + my @has_capability_value = @ARG ; + my ( $has_capability_value ) = @has_capability_value ; + + if ( ! $myimap ) + { + require_ok( "Test::MockObject" ) ; + $myimap = Test::MockObject->new( ) ; + } + + $myimap->mock( + 'capability', + sub { return wantarray ? 
+ @has_capability_value + : $has_capability_value ; + } + ) ; + + return $myimap ; +} + + +sub tests_capability_of +{ + note( 'Entering tests_capability_of()' ) ; + + is( undef, capability_of( ), + 'capability_of: no args => undef' ) ; + + my $myimap ; + is( undef, capability_of( $myimap ), + 'capability_of: undef => undef' ) ; + + + $myimap = mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=123456' ) ; + + is( undef, capability_of( $myimap, 'CACA' ), + 'capability_of: two args unknown capability => undef' ) ; + + + is( $NUMBER_123456, capability_of( $myimap, 'APPENDLIMIT' ), + 'capability_of: two args APPENDLIMIT 123456 => 123456 yeah!' ) ; + + note( 'Leaving tests_capability_of()' ) ; + return ; +} + + +sub capability_of +{ + my $imap = shift || return ; + my $capability_keyword = shift || return ; + + my @capability = $imap->capability ; + + if ( ! @capability ) { return ; } + my $capability_value = search_in_array( $capability_keyword, @capability ) ; + + return $capability_value ; +} + + +sub tests_search_in_array +{ + note( 'Entering tests_search_in_array()' ) ; + + is( undef, search_in_array( 'KA' ), + 'search_in_array: no array => undef ' ) ; + + is( 'VA', search_in_array( 'KA', ( 'KA=VA' ) ), + 'search_in_array: KA KA=VA => VA ' ) ; + + is( 'VA', search_in_array( 'KA', ( 'KA=VA', 'KB=VB' ) ), + 'search_in_array: KA KA=VA KB=VB => VA ' ) ; + + is( 'VB', search_in_array( 'KB', ( 'KA=VA', 'KB=VB' ) ), + 'search_in_array: KA=VA KB=VB => VB ' ) ; + + note( 'Leaving tests_search_in_array()' ) ; + return ; +} + +sub search_in_array +{ + my ( $key, @array ) = @ARG ; + + foreach my $item ( @array ) + { + + if ( $item =~ /([^=]+)=(.*)/ ) + { + if ( $1 eq $key ) + { + return $2 ; + } + } + } + + return ; } -sub all_defined { + +sub tests_appendlimit_from_capability +{ + note( 'Entering tests_appendlimit_from_capability()' ) ; + + is( undef, appendlimit_from_capability( ), + 'appendlimit_from_capability: no args => undef' + ) ; + + my $myimap ; + is( undef, appendlimit_from_capability( $myimap ), + 'appendlimit_from_capability: undef arg => undef' + ) ; + + + $myimap = mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=123456' ) ; + + # Normal behavior + is( $NUMBER_123456, appendlimit_from_capability( $myimap ), + 'appendlimit_from_capability: APPENDLIMIT=123456 => 123456' + ) ; + + # Not a number + $myimap = mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=ABC' ) ; + + is( undef, appendlimit_from_capability( $myimap ), + 'appendlimit_from_capability: not a number => undef' + ) ; + + note( 'Leaving tests_appendlimit_from_capability()' ) ; + return ; +} + + +sub appendlimit_from_capability +{ + my $myimap = shift ; + if ( ! 
$myimap ) + { + myprint( "Warn: no imap with call to appendlimit_from_capability\n" ) ; + return ; + } + + #myprint( Data::Dumper->Dump( [ \$myimap ] ) ) ; + my $appendlimit = capability_of( $myimap, 'APPENDLIMIT' ) ; + #myprint( "has_capability APPENDLIMIT $appendlimit\n" ) ; + if ( is_an_integer( $appendlimit ) ) + { + return $appendlimit ; + } + return ; +} + + +sub tests_appendlimit +{ + note( 'Entering tests_appendlimit()' ) ; + + is( undef, appendlimit( ), + 'appendlimit: no args => undef' + ) ; + + my $mysync = { } ; + + is( undef, appendlimit( $mysync ), + 'appendlimit: no imap2 => undef' + ) ; + + my $myimap ; + $myimap = mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=123456' ) ; + + $mysync->{ imap2 } = $myimap ; + + is( 123456, appendlimit( $mysync ), + 'appendlimit: imap2 with APPENDLIMIT=123456 => 123456' + ) ; + + note( 'Leaving tests_appendlimit()' ) ; + return ; +} + +sub appendlimit +{ + my $mysync = shift || return ; + my $myimap = $mysync->{ imap2 } ; + + my $appendlimit = appendlimit_from_capability( $myimap ) ; + if ( defined $appendlimit ) + { + myprint( "Host2: found APPENDLIMIT=$appendlimit in CAPABILITY\n" ) ; + return $appendlimit ; + } + return ; + +} + + +sub tests_maxsize_setting +{ + note( 'Entering tests_maxsize_setting()' ) ; + + is( undef, maxsize_setting( ), + 'maxsize_setting: no args => undef' + ) ; + + my $mysync ; + + is( undef, maxsize_setting( $mysync ), + 'maxsize_setting: undef arg => undef' + ) ; + + $mysync = { } ; + $mysync->{ maxsize } = $NUMBER_123456 ; + + is( $NUMBER_123456, maxsize_setting( $mysync ), + 'maxsize_setting: --maxsize 123456 alone => 123456' + ) ; + + + $mysync = { } ; + my $myimap ; + + $myimap = mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=654321' ) ; + $mysync->{ imap2 } = $myimap ; + + # APPENDLIMIT alone + is( $NUMBER_654321, maxsize_setting( $mysync ), + 'maxsize_setting: APPENDLIMIT 654321 alone => 654321' + ) ; + + is( $NUMBER_654321, $mysync->{ maxsize }, + 'maxsize_setting: APPENDLIMIT 654321 alone => maxsize 654321' + ) ; + + + + # Case: "APPENDLIMIT >= --maxsize" => maxsize. + $mysync->{ maxsize } = $NUMBER_123456 ; + + is( $NUMBER_123456, maxsize_setting( $mysync ), + 'maxsize_setting: APPENDLIMIT 654321 --maxsize 123456 => 123456' + ) ; + + # Case: "APPENDLIMIT < --maxsize" => APPENDLIMIT. 
+ + $myimap = mock_capability( $myimap, 'IMAP4rev1', 'APPENDLIMIT=123456' ) ; + $mysync->{ maxsize } = $NUMBER_654321 ; + + is( $NUMBER_123456, maxsize_setting( $mysync ), + 'maxsize_setting: APPENDLIMIT 123456 --maxsize 654321 => 123456 ' + ) ; + + note( 'Leaving tests_maxsize_setting()' ) ; + + return ; +} + +sub maxsize_setting +{ + my $mysync = shift || return ; + + $mysync->{ appendlimit } = appendlimit( $mysync ) ; + + my $maxsize ; + + if ( all_defined( $mysync->{ appendlimit }, $mysync->{ maxsize } ) ) + { + return min( $mysync->{ maxsize }, $mysync->{ appendlimit } ) ; + } + elsif ( defined $mysync->{ appendlimit } ) + { + $mysync->{ maxsize } = $mysync->{ appendlimit } ; + return $mysync->{ maxsize } ; + }elsif ( defined $mysync->{ maxsize } ) + { + return $mysync->{ maxsize } ; + }else + { + return ; + } +} + + + + +sub all_defined +{ if ( not @ARG ) { return 0 ; } @@ -2737,7 +3346,8 @@ sub all_defined { return 1 ; } -sub tests_all_defined { +sub tests_all_defined +{ note( 'Entering tests_all_defined()' ) ; is( 0, all_defined( ), 'all_defined: no param => 0' ) ; @@ -2754,7 +3364,8 @@ sub tests_all_defined { } -sub tests_hashsynclocal { +sub tests_hashsynclocal +{ note( 'Entering tests_hashsynclocal()' ) ; my $mysync = { @@ -2792,7 +3403,8 @@ sub tests_hashsynclocal { return ; } -sub hashsynclocal { +sub hashsynclocal +{ my $mysync = shift ; my $hashkey = shift ; # Optional, only there for tests my $hashfile = $mysync->{ hashfile } ; @@ -2810,7 +3422,8 @@ sub hashsynclocal { } -sub tests_hashsync { +sub tests_hashsync +{ note( 'Entering tests_hashsync()' ) ; @@ -2827,7 +3440,8 @@ sub tests_hashsync { return ; } -sub hashsync { +sub hashsync +{ my $mysync = shift ; my $hashkey = shift ; @@ -2845,7 +3459,8 @@ sub hashsync { } -sub tests_createhashfileifneeded { +sub tests_createhashfileifneeded +{ note( 'Entering tests_createhashfileifneeded()' ) ; is( undef, createhashfileifneeded( ), 'createhashfileifneeded: no parameters => undef' ) ; @@ -2854,7 +3469,8 @@ sub tests_createhashfileifneeded { return ; } -sub createhashfileifneeded { +sub createhashfileifneeded +{ my $hashfile = shift ; my $hashkey = shift || rand32( ) ; @@ -2887,7 +3503,8 @@ sub createhashfileifneeded { return ; } -sub tests_rand32 { +sub tests_rand32 +{ note( 'Entering tests_rand32()' ) ; my $string = rand32( ) ; @@ -2899,14 +3516,16 @@ sub tests_rand32 { return ; } -sub rand32 { +sub rand32 +{ my @chars = ( "a".."z" ) ; my $string; $string .= $chars[rand @chars] for 1..32 ; return $string ; } -sub imap_id_stuff { +sub imap_id_stuff +{ my $mysync = shift ; if ( not $mysync->{id} ) { return ; } ; @@ -2919,7 +3538,8 @@ sub imap_id_stuff { return ; } -sub imap_id { +sub imap_id +{ my ( $mysync, $imap, $Side ) = @_ ; $Side ||= q{} ; @@ -2943,7 +3563,8 @@ sub imap_id { return( $imap_id_response ) ; } -sub imapsync_id { +sub imapsync_id +{ my $mysync = shift ; my $overhashref = shift ; # See http://tools.ietf.org/html/rfc2971.html @@ -2957,7 +3578,7 @@ sub imapsync_id { vendor => 'Gilles LAMIRAL', 'support-url' => 'https://imapsync.lamiral.info/', # Example of date-time: 19-Sep-2015 08:56:07 - date => date_from_rcs( q{$Date: 2018/05/05 21:10:43 $ } ), + date => date_from_rcs( q{$Date: 2019/05/01 22:14:00 $ } ), } ; my $imapsync_id_github = { @@ -2966,7 +3587,7 @@ sub imapsync_id { os => $OSNAME, vendor => 'github', 'support-url' => 'https://github.com/imapsync/imapsync', - date => date_from_rcs( q{$Date: 2018/05/05 21:10:43 $ } ), + date => date_from_rcs( q{$Date: 2019/05/01 22:14:00 $ } ), } ; $imapsync_id = 
$imapsync_id_lamiral ; @@ -2977,7 +3598,8 @@ sub imapsync_id { return( $imapsync_id_str ) ; } -sub tests_imapsync_id { +sub tests_imapsync_id +{ note( 'Entering tests_imapsync_id()' ) ; my $mysync ; @@ -2987,17 +3609,18 @@ sub tests_imapsync_id { version => 111, os => 'beurk', date => '22-12-1968', - side => 'host1' + side => 'host1' } ), - 'tests_imapsync_id override' + 'tests_imapsync_id override' ) ; note( 'Leaving tests_imapsync_id()' ) ; return ; } -sub format_for_imap_arg { +sub format_for_imap_arg +{ my $ref = shift ; my $string = q{} ; @@ -3018,7 +3641,8 @@ sub format_for_imap_arg { -sub tests_format_for_imap_arg { +sub tests_format_for_imap_arg +{ note( 'Entering tests_format_for_imap_arg()' ) ; ok( 'NIL' eq format_for_imap_arg( { } ), 'format_for_imap_arg empty hash ref' ) ; @@ -3029,8 +3653,9 @@ sub tests_format_for_imap_arg { return ; } -sub quota { - my ( $imap, $side, $mysync ) = @_ ; +sub quota +{ + my ( $mysync, $imap, $side ) = @_ ; my %side = ( h1 => 'Host1', @@ -3050,8 +3675,8 @@ sub quota { #$imap->quota( '""' ) ; myprint( "\n" ) ; $imap->Debug( $debug_before ) ; - my $quota_limit_bytes = quota_extract_storage_limit_in_bytes( $getquotaroot ) ; - my $quota_current_bytes = quota_extract_storage_current_in_bytes( $getquotaroot ) ; + my $quota_limit_bytes = quota_extract_storage_limit_in_bytes( $mysync, $getquotaroot ) ; + my $quota_current_bytes = quota_extract_storage_current_in_bytes( $mysync, $getquotaroot ) ; $mysync->{$side}->{quota_limit_bytes} = $quota_limit_bytes ; $mysync->{$side}->{quota_current_bytes} = $quota_current_bytes ; my $quota_percent ; @@ -3068,61 +3693,69 @@ sub quota { return ; } -sub tests_quota_extract_storage_limit_in_bytes { - note( 'Entering tests_quota_extract_storage_limit_in_bytes()' ) ; +sub tests_quota_extract_storage_limit_in_bytes +{ + note( 'Entering tests_quota_extract_storage_limit_in_bytes()' ) ; + my $mysync = {} ; my $imap_output = [ '* QUOTAROOT "INBOX" "Storage quota" "Messages quota"', '* QUOTA "Storage quota" (STORAGE 1 104857600)', '* QUOTA "Messages quota" (MESSAGE 2 100000)', '5 OK Getquotaroot completed.' ] ; - ok( $NUMBER_104_857_600 * $KIBI == quota_extract_storage_limit_in_bytes( $imap_output ), 'quota_extract_storage_limit_in_bytes ') ; + ok( $NUMBER_104_857_600 * $KIBI == quota_extract_storage_limit_in_bytes( $mysync, $imap_output ), 'quota_extract_storage_limit_in_bytes ') ; - note( 'Leaving tests_quota_extract_storage_limit_in_bytes()' ) ; + note( 'Leaving tests_quota_extract_storage_limit_in_bytes()' ) ; return ; } -sub quota_extract_storage_limit_in_bytes { +sub quota_extract_storage_limit_in_bytes +{ + my $mysync = shift ; my $imap_output = shift ; my $limit_kb ; $limit_kb = ( map { /.*\(\s*STORAGE\s+\d+\s+(\d+)\s*\)/x ? $1 : () } @{ $imap_output } )[0] ; $limit_kb ||= 0 ; - $debug and myprint( "storage_limit_kb = $limit_kb\n" ) ; + $mysync->{ debug } and myprint( "storage_limit_kb = $limit_kb\n" ) ; return( $KIBI * $limit_kb ) ; } -sub tests_quota_extract_storage_current_in_bytes { +sub tests_quota_extract_storage_current_in_bytes +{ note( 'Entering tests_quota_extract_storage_current_in_bytes()' ) ; - + my $mysync = {} ; my $imap_output = [ '* QUOTAROOT "INBOX" "Storage quota" "Messages quota"', '* QUOTA "Storage quota" (STORAGE 1 104857600)', '* QUOTA "Messages quota" (MESSAGE 2 100000)', '5 OK Getquotaroot completed.' 
] ; - ok( 1*$KIBI == quota_extract_storage_current_in_bytes( $imap_output ), 'quota_extract_storage_current_in_bytes: 1 => 1024 ') ; + ok( 1*$KIBI == quota_extract_storage_current_in_bytes( $mysync, $imap_output ), 'quota_extract_storage_current_in_bytes: 1 => 1024 ') ; note( 'Leaving tests_quota_extract_storage_current_in_bytes()' ) ; return ; } -sub quota_extract_storage_current_in_bytes { +sub quota_extract_storage_current_in_bytes +{ + my $mysync = shift ; my $imap_output = shift ; my $current_kb ; $current_kb = ( map { /.*\(\s*STORAGE\s+(\d+)\s+\d+\s*\)/x ? $1 : () } @{ $imap_output } )[0] ; $current_kb ||= 0 ; - $debug and myprint( "storage_current_kb = $current_kb\n" ) ; + $mysync->{ debug } and myprint( "storage_current_kb = $current_kb\n" ) ; return( $KIBI * $current_kb ) ; } -sub automap { +sub automap +{ my ( $mysync ) = @_ ; if ( $mysync->{automap} ) { @@ -3132,8 +3765,8 @@ sub automap { return ; } - $mysync->{h1_special} = special_from_folders_hash( $mysync->{imap1}, 'Host1' ) ; - $mysync->{h2_special} = special_from_folders_hash( $mysync->{imap2}, 'Host2' ) ; + $mysync->{h1_special} = special_from_folders_hash( $mysync, $mysync->{imap1}, 'Host1' ) ; + $mysync->{h2_special} = special_from_folders_hash( $mysync, $mysync->{imap2}, 'Host2' ) ; build_possible_special( $mysync ) ; build_guess_special( $mysync ) ; @@ -3145,7 +3778,8 @@ sub automap { -sub build_guess_special { +sub build_guess_special +{ my ( $mysync ) = shift ; foreach my $h1_fold ( sort keys %{ $mysync->{h1_folders_all} } ) { @@ -3175,7 +3809,8 @@ sub build_guess_special { return ; } -sub guess_special { +sub guess_special +{ my( $folder, $possible_special_ref, $prefix ) = @_ ; my $folder_no_prefix = $folder ; @@ -3189,7 +3824,8 @@ sub guess_special { return( $guess_special ) ; } -sub tests_guess_special { +sub tests_guess_special +{ note( 'Entering tests_guess_special()' ) ; my $possible_special_ref = build_possible_special( my $mysync ) ; @@ -3202,9 +3838,10 @@ sub tests_guess_special { return ; } -sub build_automap { +sub build_automap +{ my $mysync = shift ; - $debug and myprint( "Entering build_automap\n" ) ; + $mysync->{ debug } and myprint( "Entering build_automap\n" ) ; foreach my $h1_fold ( @{ $mysync->{h1_folders_wanted} } ) { my $h2_fold ; my $h1_special = $mysync->{h1_special}{$h1_fold} ; @@ -3253,7 +3890,8 @@ sub build_automap { # I will not add what there is at: # http://stackoverflow.com/questions/2185391/localized-gmail-imap-folders/2185548#2185548 # because it works well without -sub build_possible_special { +sub build_possible_special +{ my $mysync = shift ; my $possible_special = { } ; # All|Archive|Drafts|Flagged|Junk|Sent|Trash @@ -3272,19 +3910,41 @@ sub build_possible_special { 'Elementy wys&AUI-ane'] ; $possible_special->{'\Trash'} = [ 'Trash', 'TRASH', '&BCMENAQwBDsENQQ9BD0ESwQ1-', '&BBoEPgRABDcEOAQ9BDA-', 'Kosz', 'Deleted Items' ] ; - + foreach my $special ( qw( \All \Archive \Drafts \Flagged \Junk \Sent \Trash ) ){ foreach my $possible_folder ( @{ $possible_special->{$special} } ) { $possible_special->{ $possible_folder } = $special ; } ; } $mysync->{possible_special} = $possible_special ; - $debug and myprint( Data::Dumper->Dump( [ $possible_special ], [ 'possible_special' ] ) ) ; + $mysync->{ debug } and myprint( Data::Dumper->Dump( [ $possible_special ], [ 'possible_special' ] ) ) ; return( $possible_special ) ; } -sub special_from_folders_hash { - my ( $imap, $side ) = @_ ; +sub tests_special_from_folders_hash +{ + note( 'Entering tests_special_from_folders_hash()' ) ; + + my $mysync 
= {} ; + require_ok( "Test::MockObject" ) ; + my $imapT = Test::MockObject->new( ) ; + + is( undef, special_from_folders_hash( ), 'special_from_folders_hash: no args' ) ; + is( undef, special_from_folders_hash( $mysync ), 'special_from_folders_hash: undef args' ) ; + is_deeply( {}, special_from_folders_hash( $mysync, $imapT ), 'special_from_folders_hash: $imap void' ) ; + + $imapT->mock( 'folders_hash', sub { return( [ { name => 'Sent', attrs => [ '\Sent' ] } ] ) } ) ; + + is_deeply( { Sent => '\Sent', '\Sent' => 'Sent' }, + special_from_folders_hash( $mysync, $imapT ), 'special_from_folders_hash: $imap \Sent' ) ; + + note( 'Leaving tests_special_from_folders_hash()' ) ; + return( ) ; +} + +sub special_from_folders_hash +{ + my ( $mysync, $imap, $side ) = @_ ; my %special = ( ) ; if ( ! defined $imap ) { return ; } @@ -3292,7 +3952,7 @@ sub special_from_folders_hash { if ( ! $imap->can( 'folders_hash' ) ) { my $error = "$side: To have automagic rfc6154 folder mapping, upgrade Mail::IMAPClient >= 3.34\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; return( \%special ) ; # empty hash ref } my $folders_hash = $imap->folders_hash( ) ; @@ -3315,26 +3975,10 @@ sub special_from_folders_hash { return( \%special ) ; } -sub tests_special_from_folders_hash { - note( 'Entering tests_special_from_folders_hash()' ) ; - - - require Test::MockObject ; - my $imapT = Test::MockObject->new( ) ; - - is( undef, special_from_folders_hash( ), 'special_from_folders_hash: no args' ) ; - is_deeply( {}, special_from_folders_hash( $imapT ), 'special_from_folders_hash: $imap void' ) ; - - $imapT->mock( 'folders_hash', sub { return( [ { name => 'Sent', attrs => [ '\Sent' ] } ] ) } ) ; - is_deeply( { Sent => '\Sent', '\Sent' => 'Sent' }, special_from_folders_hash( $imapT ), 'special_from_folders_hash: $imap \Sent' ) ; - - note( 'Leaving tests_special_from_folders_hash()' ) ; - return( ) ; -} - -sub errors_incr { +sub errors_incr +{ my ( $mysync, @error ) = @ARG ; - $sync->{nb_errors}++ ; + $mysync->{nb_errors}++ ; if ( @error ) { errors_log( $mysync, @error ) ; @@ -3342,10 +3986,10 @@ sub errors_incr { } $mysync->{errorsmax} ||= $ERRORS_MAX ; - if ( $sync->{nb_errors} >= $mysync->{errorsmax} ) { - myprint( "Maximum number of errors $mysync->{errorsmax} reached ( you can change $mysync->{errorsmax} to any value, for example 100 with --errorsmax 100 ). Exiting.\n" ) ; + if ( $mysync->{nb_errors} >= $mysync->{errorsmax} ) { + myprint( "Maximum number of errors $mysync->{errorsmax} reached ( you can change $mysync->{errorsmax} to any value, for example 100 with --errorsmax 100 ). Exiting.\n" ) ; if ( $mysync->{errorsdump} ) { - myprint( errorsdump( $sync->{nb_errors}, errors_log( $mysync ) ) ) ; + myprint( errorsdump( $mysync->{nb_errors}, errors_log( $mysync ) ) ) ; # again since errorsdump( ) can be very verbose and masquerade previous warning myprint( "Maximum number of errors $mysync->{errorsmax} reached ( you can change $mysync->{errorsmax} to any value, for example 100 with --errorsmax 100 ). 
Exiting.\n" ) ; } @@ -3354,7 +3998,23 @@ sub errors_incr { return ; } -sub errors_log { +sub tests_errors_log +{ + note( 'Entering tests_errors_log()' ) ; + is( undef, errors_log( ), 'errors_log: no args => undef' ) ; + my $mysync = {} ; + is( undef, errors_log( $mysync ), 'errors_log: empty => undef' ) ; + is_deeply( [ 'aieaie' ], [ errors_log( $mysync, 'aieaie' ) ], 'errors_log: aieaie => aieaie' ) ; + # cumulative + is_deeply( [ 'aieaie' ], [ errors_log( $mysync ) ], 'errors_log: nothing more => aieaie' ) ; + is_deeply( [ 'aieaie', 'ouille' ], [ errors_log( $mysync, 'ouille' ) ], 'errors_log: ouille => aieaie ouille' ) ; + is_deeply( [ 'aieaie', 'ouille' ], [ errors_log( $mysync ) ], 'errors_log: nothing more => aieaie ouille' ) ; + note( 'Leaving tests_errors_log()' ) ; + return ; +} + +sub errors_log +{ my ( $mysync, @error ) = @ARG ; if ( ! $mysync->{errors_log} ) { @@ -3372,16 +4032,9 @@ sub errors_log { } } -sub tests_errors_log { - note( 'Entering tests_errors_log()' ) ; - - note( 'Leaving tests_errors_log()' ) ; - return ; -} - - -sub errorsdump { +sub errorsdump +{ my( $nb_errors, @errors_log ) = @ARG ; my $error_num = 0 ; my $errors_list = q{} ; @@ -3396,7 +4049,8 @@ sub errorsdump { } -sub tests_live_result { +sub tests_live_result +{ note( 'Entering tests_live_result()' ) ; my $nb_errors = shift ; @@ -3409,77 +4063,94 @@ sub tests_live_result { return ; } -sub foldersizesatend { +sub foldersizesatend +{ + my $mysync = shift ; timenext( ) ; - return if ( $imap1->IsUnconnected( ) ) ; - return if ( $imap2->IsUnconnected( ) ) ; + return if ( $mysync->{imap1}->IsUnconnected( ) ) ; + return if ( $mysync->{imap2}->IsUnconnected( ) ) ; # Get all folders on host2 again since new were created - @h2_folders_all = sort $imap2->folders(); + @h2_folders_all = sort $mysync->{imap2}->folders(); for ( @h2_folders_all ) { $h2_folders_all{ $_ } = 1 ; $h2_folders_all_UPPER{ uc $_ } = 1 ; } ; - ( $h1_nb_msg_end, $h1_bytes_end ) = foldersizes( 'Host1', $imap1, $search1, $sync->{abletosearch1}, @h1_folders_wanted ) ; - ( $h2_nb_msg_end, $h2_bytes_end ) = foldersizes( 'Host2', $imap2, $search2, $sync->{abletosearch2}, @h2_folders_from_1_wanted ) ; + ( $h1_nb_msg_end, $h1_bytes_end ) = foldersizes( 'Host1', $mysync->{imap1}, $search1, $mysync->{abletosearch1}, @h1_folders_wanted ) ; + ( $h2_nb_msg_end, $h2_bytes_end ) = foldersizes( 'Host2', $mysync->{imap2}, $search2, $mysync->{abletosearch2}, @h2_folders_from_1_wanted ) ; if ( not all_defined( $h1_nb_msg_end, $h1_bytes_end, $h2_nb_msg_end, $h2_bytes_end ) ) { my $error = "Failure getting foldersizes, final differences will not be calculated\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; } return ; } -sub size_filtered_flag { +sub size_filtered_flag +{ + my $mysync = shift ; my $h1_size = shift ; - if (defined $maxsize and $h1_size >= $maxsize) { + if ( defined $mysync->{ maxsize } and $h1_size >= $mysync->{ maxsize } ) { return( 1 ) ; } - if (defined $minsize and $h1_size <= $minsize) { + if ( defined $minsize and $h1_size <= $minsize ) { return( 1 ) ; } return( 0 ) ; } -sub sync_flags_fir { - my ( $h1_fold, $h1_msg, $h2_fold, $h2_msg, $permanentflags2, $h1_fir_ref, $h2_fir_ref ) = @_ ; +sub sync_flags_fir +{ + my ( $mysync, $h1_fold, $h1_msg, $h2_fold, $h2_msg, $permanentflags2, $h1_fir_ref, $h2_fir_ref ) = @_ ; if ( not defined $h1_msg ) { return } ; if ( not defined $h2_msg ) { return } ; my $h1_size = $h1_fir_ref->{$h1_msg}->{'RFC822.SIZE'} ; - return if size_filtered_flag( $h1_size ) ; + return if size_filtered_flag( 
$mysync, $h1_size ) ; # used cached flag values for efficiency my $h1_flags = $h1_fir_ref->{ $h1_msg }->{ 'FLAGS' } || q{} ; my $h2_flags = $h2_fir_ref->{ $h2_msg }->{ 'FLAGS' } || q{} ; - sync_flags( $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $h2_flags, $permanentflags2 ) ; + sync_flags( $mysync, $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $h2_flags, $permanentflags2 ) ; return ; } -sub sync_flags_after_copy { - my( $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $permanentflags2 ) = @_ ; +sub sync_flags_after_copy +{ + # Activated with option --syncflagsaftercopy + my( $mysync, $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $permanentflags2 ) = @_ ; - my @h2_flags = $imap2->flags( $h2_msg ) ; - my $h2_flags = "@h2_flags" ; - ( $debug or $debugflags ) and myprint( "Host2 flags before resync by STORE on msg $h2_msg: $h2_flags\n" ) ; - sync_flags( $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $h2_flags, $permanentflags2 ) ; + if ( my @h2_flags = $mysync->{imap2}->flags( $h2_msg ) ) { + my $h2_flags = "@h2_flags" ; + ( $mysync->{ debug } or $debugflags ) and myprint( "Host2: msg $h2_fold/$h2_msg flags before sync flags after copy ( $h2_flags )\n" ) ; + sync_flags( $mysync, $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $h2_flags, $permanentflags2 ) ; + }else{ + myprint( "Host2: msg $h2_fold/$h2_msg could not get its flags for sync flags after copy\n" ) ; + } return ; } -sub sync_flags { - my( $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $h2_flags, $permanentflags2 ) = @_ ; +# Globals +# $debug +# $debugflags +# $permanentflags2 - ( $debug or $debugflags ) and - myprint( "Host1: flags init msg $h1_fold/$h1_msg flags( $h1_flags ) Host2 $h2_fold/$h2_msg flags( $h2_flags )\n" ) ; + +sub sync_flags +{ + my( $mysync, $h1_fold, $h1_msg, $h1_flags, $h2_fold, $h2_msg, $h2_flags, $permanentflags2 ) = @_ ; + + ( $mysync->{ debug } or $debugflags ) and + myprint( "Host1: flags init msg $h1_fold/$h1_msg flags( $h1_flags ) Host2 msg $h2_fold/$h2_msg flags( $h2_flags )\n" ) ; $h1_flags = flags_for_host2( $h1_flags, $permanentflags2 ) ; $h2_flags = flagscase( $h2_flags ) ; - ( $debug or $debugflags ) and - myprint( "Host1 flags filt msg $h1_fold/$h1_msg flags( $h1_flags ) Host2 $h2_fold/$h2_msg flags( $h2_flags )\n" ) ; + ( $mysync->{ debug } or $debugflags ) and + myprint( "Host1: flags filt msg $h1_fold/$h1_msg flags( $h1_flags ) Host2 msg $h2_fold/$h2_msg flags( $h2_flags )\n" ) ; # compare flags - set flags if there a difference @@ -3487,17 +4158,18 @@ sub sync_flags { my @h2_flags = sort split(q{ }, $h2_flags ); my $diff = compare_lists( \@h1_flags, \@h2_flags ); - $diff and ( $debug or $debugflags ) - and myprint( "Host2 flags msg $h2_fold/$h2_msg replacing h2 flags( $h2_flags ) with h1 flags( $h1_flags )\n" ) ; - # This sets flags so flags can be removed with this - # When you remove a \Seen flag on host1 you want to it - # to be removed on host2. Just add flags is not what - # we need most of the time. + $diff and ( $mysync->{ debug } or $debugflags ) + and myprint( "Host2: flags msg $h2_fold/$h2_msg replacing h2 flags( $h2_flags ) with h1 flags( $h1_flags )\n" ) ; - if ( not $sync->{dry} and $diff and not $imap2->store( $h2_msg, "FLAGS.SILENT (@h1_flags)" ) ) { - my $error_msg = join q{}, "Host2 flags msg $h2_fold/$h2_msg could not add flags [@h1_flags]: ", - $imap2->LastError || q{}, "\n" ; - errors_incr( $sync, $error_msg ) ; + # This sets flags exactly. So flags can be removed with this. + # When you remove a \Seen flag on host1 you want it + # to be removed on host2. 
Just add flags is not what + # we need most of the time, so no + like in "+FLAGS.SILENT". + + if ( not $mysync->{dry} and $diff and not $mysync->{imap2}->store( $h2_msg, "FLAGS.SILENT (@h1_flags)" ) ) { + my $error_msg = join q{}, "Host2: flags msg $h2_fold/$h2_msg could not add flags [@h1_flags]: ", + $mysync->{imap2}->LastError || q{}, "\n" ; + errors_incr( $mysync, $error_msg ) ; } return ; @@ -3505,11 +4177,13 @@ sub sync_flags { -sub _filter { +sub _filter +{ + my $mysync = shift ; my $str = shift or return q{} ; my $sz = $SIZE_MAX_STR ; my $len = length $str ; - if ( not $debug and $len > $sz*2 ) { + if ( not $mysync->{ debug } and $len > $sz*2 ) { my $beg = substr $str, 0, $sz ; my $end = substr $str, -$sz, $sz ; $str = $beg . '...' . $end ; @@ -3520,17 +4194,18 @@ sub _filter { -sub lost_connection { - my( $imap, $error_message ) = @_; +sub lost_connection +{ + my( $mysync, $imap, $error_message ) = @_; if ( $imap->IsUnconnected( ) ) { - $sync->{nb_errors}++ ; + $mysync->{nb_errors}++ ; my $lcomm = $imap->LastIMAPCommand || q{} ; my $einfo = $imap->LastError || @{$imap->History}[$LAST] || q{} ; # if string is long try reduce to a more reasonable size - $lcomm = _filter( $lcomm ) ; - $einfo = _filter( $einfo ) ; - myprint( "Failure: last command: $lcomm\n") if ($debug && $lcomm) ; + $lcomm = _filter( $mysync, $lcomm ) ; + $einfo = _filter( $mysync, $einfo ) ; + myprint( "Failure: last command: $lcomm\n") if ( $mysync->{ debug } && $lcomm) ; myprint( "Failure: lost connection $error_message: ", $einfo, "\n") ; return( 1 ) ; } @@ -3539,8 +4214,10 @@ sub lost_connection { } } -sub tests_max { +sub tests_max +{ note( 'Entering tests_max()' ) ; + is( 0, max( 0 ), 'max 0 => 0' ) ; is( 1, max( 1 ), 'max 1 => 1' ) ; is( $MINUS_ONE, max( $MINUS_ONE ), 'max -1 => -1') ; @@ -3566,10 +4243,11 @@ sub tests_max { return ; } -sub max { +sub max +{ my @list = @_ ; return( undef ) if ( 0 == scalar @list ) ; - + my( @numbers, @notnumbers ) ; foreach my $item ( @list ) { if ( is_number( $item ) ) { @@ -3578,7 +4256,7 @@ sub max { push @notnumbers, $item ; } } - + my @sorted ; if ( @numbers ) { @sorted = sort { $a <=> $b } @numbers ; @@ -3587,11 +4265,14 @@ sub max { }else{ return ; } - + return( pop @sorted ) ; } -sub tests_is_number { +sub tests_is_number +{ + note( 'Entering tests_is_number()' ) ; + ok( ! is_number( ), 'is_number: no args => undef ' ) ; ok( is_number( 1 ), 'is_number: 1 => 1' ) ; ok( is_number( 1.1 ), 'is_number: 1.1 => 1' ) ; @@ -3603,23 +4284,27 @@ sub tests_is_number { ok( ! is_number( '0haha' ), 'is_number: 0haha => no' ) ; ok( ! is_number( '2haha' ), 'is_number: 2haha => no' ) ; ok( ! is_number( 'haha2' ), 'is_number: haha2 => no' ) ; + + note( 'Leaving tests_is_number()' ) ; return ; } -sub is_number { +sub is_number +{ my $item = shift ; - + if ( ! 
defined $item ) { return ; } - + if ( $item =~ /\A$RE{num}{real}\Z/ ) { return 1 ; } return ; } -sub tests_min { +sub tests_min +{ note( 'Entering tests_min()' ) ; is( 0, min( 0 ), 'min 0 => 0' ) ; @@ -3633,7 +4318,7 @@ sub tests_min { is( 1, min( '100', '42', 1 ), 'min 100 42 1 => 1' ) ; is( 1, min( $NUMBER_100, 'haha', 1 ), 'min 100 haha 1 => 1') ; is( $MINUS_ONE, min( $MINUS_ONE, 1 ), 'min -1 1 => -1') ; - + is( 1, min( undef, 1 ), 'min undef 1 => 1' ) ; is( 0, min( undef, 0 ), 'min undef 0 => 0' ) ; is( 1, min( undef, 1 ), 'min undef 1 => 1' ) ; @@ -3649,7 +4334,8 @@ sub tests_min { } -sub min { +sub min +{ my @list = @_ ; return( undef ) if ( 0 == scalar @list ) ; @@ -3661,7 +4347,7 @@ sub min { push @notnumbers, $item ; } } - + my @sorted ; if ( @numbers ) { @sorted = sort { $a <=> $b } @numbers ; @@ -3675,8 +4361,10 @@ sub min { } -sub check_lib_version { - $debug and myprint( "IMAPClient $Mail::IMAPClient::VERSION\n" ) ; +sub check_lib_version +{ + my $mysync = shift ; + $mysync->{ debug } and myprint( "IMAPClient $Mail::IMAPClient::VERSION\n" ) ; if ( '2.2.9' eq $Mail::IMAPClient::VERSION ) { myprint( "imapsync no longer supports Mail::IMAPClient 2.2.9, upgrade it\n" ) ; return 0 ; @@ -3689,18 +4377,21 @@ sub check_lib_version { return ; } -sub module_version_str { +sub module_version_str +{ my( $module_name, $module_version ) = @_ ; my $str = mysprintf( "%-20s %s\n", $module_name, $module_version ) ; return( $str ) ; } -sub modulesversion { +sub modulesversion +{ my @list_version; my %modulesversion = ( 'Authen::NTLM' => sub { $Authen::NTLM::VERSION }, + 'CGI' => sub { $CGI::VERSION }, 'Compress::Zlib' => sub { $Compress::Zlib::VERSION }, 'Crypt::OpenSSL::RSA' => sub { $Crypt::OpenSSL::RSA::VERSION }, 'Data::Uniqid' => sub { $Data::Uniqid::VERSION }, @@ -3711,10 +4402,11 @@ sub modulesversion { 'File::Spec' => sub { $File::Spec::VERSION }, 'Getopt::Long' => sub { $Getopt::Long::VERSION }, 'HTML::Entities' => sub { $HTML::Entities::VERSION }, - 'IO::Socket::INET6' => sub { $IO::Socket::INET6::VERSION }, - 'IO::Socket::INET' => sub { $IO::Socket::INET::VERSION }, - 'IO::Socket::SSL' => sub { $IO::Socket::SSL::VERSION }, 'IO::Socket' => sub { $IO::Socket::VERSION }, + 'IO::Socket::INET' => sub { $IO::Socket::INET::VERSION }, + 'IO::Socket::INET6' => sub { $IO::Socket::INET6::VERSION }, + 'IO::Socket::IP' => sub { $IO::Socket::IP::VERSION }, + 'IO::Socket::SSL' => sub { $IO::Socket::SSL::VERSION }, 'IO::Tee' => sub { $IO::Tee::VERSION }, 'JSON' => sub { $JSON::VERSION }, 'JSON::WebToken' => sub { $JSON::WebToken::VERSION }, @@ -3742,17 +4434,58 @@ sub modulesversion { push @list_version, module_version_str( $module_name, $v ) ; } - return( @list_version ) ; } +sub tests_command_line_nopassword +{ + note( 'Entering tests_command_line_nopassword()' ) ; + + ok( q{} eq command_line_nopassword(), 'command_line_nopassword void' ); + my $mysync = {} ; + ok( '--blabla' eq command_line_nopassword( $mysync, '--blabla' ), 'command_line_nopassword --blabla' ); + #myprint( command_line_nopassword((qw{ --password1 secret1 })), "\n" ) ; + ok( '--password1 MASKED' eq command_line_nopassword( $mysync, qw{ --password1 secret1}), 'command_line_nopassword --password1' ); + ok( '--blabla --password1 MASKED --blibli' + eq command_line_nopassword( $mysync, qw{ --blabla --password1 secret1 --blibli } ), 'command_line_nopassword --password1 --blibli' ); + $mysync->{showpasswords} = 1 ; + ok( q{} eq command_line_nopassword(), 'command_line_nopassword void' ); + ok( '--blabla' eq command_line_nopassword( 
$mysync, '--blabla'), 'command_line_nopassword --blabla' ); + #myprint( command_line_nopassword((qw{ --password1 secret1 })), "\n" ) ; + ok( '--password1 secret1' eq command_line_nopassword( $mysync, qw{ --password1 secret1} ), 'command_line_nopassword --password1' ); + ok( '--blabla --password1 secret1 --blibli' + eq command_line_nopassword( $mysync, qw{ --blabla --password1 secret1 --blibli } ), 'command_line_nopassword --password1 --blibli' ); + + note( 'Leaving tests_command_line_nopassword()' ) ; + return ; +} + # Construct a command line copy with passwords replaced by MASKED. -sub command_line_nopassword { - my @argv = @_ ; +sub command_line_nopassword +{ + my $mysync = shift @ARG ; + my @argv = @ARG ; my @argv_nopassword ; - return( "@argv" ) if $sync->{showpasswords} ; + if ( $mysync->{ cmdcgi } ) { + @argv_nopassword = mask_password_value( @{ $mysync->{ cmdcgi } } ) ; + return( "@argv_nopassword" ) ; + } + + if ( $mysync->{showpasswords} ) + { + return( "@argv" ) ; + } + + @argv_nopassword = mask_password_value( @argv ) ; + return("@argv_nopassword") ; +} + +sub mask_password_value +{ + my @argv = @ARG ; + my @argv_nopassword ; while ( @argv ) { my $arg = shift @argv ; # option name or value if ( $arg =~ m/-password[12]/x ) { @@ -3762,34 +4495,54 @@ sub command_line_nopassword { push @argv_nopassword, $arg ; # same option or value } } - return("@argv_nopassword") ; + return @argv_nopassword ; } -sub tests_command_line_nopassword { - note( 'Entering tests_command_line_nopassword()' ) ; - ok(q{} eq command_line_nopassword(), 'command_line_nopassword void'); - ok('--blabla' eq command_line_nopassword('--blabla'), 'command_line_nopassword --blabla'); - #myprint( command_line_nopassword((qw{ --password1 secret1 })), "\n" ) ; - ok('--password1 MASKED' eq command_line_nopassword(qw{ --password1 secret1}), 'command_line_nopassword --password1'); - ok('--blabla --password1 MASKED --blibli' - eq command_line_nopassword(qw{ --blabla --password1 secret1 --blibli }), 'command_line_nopassword --password1 --blibli'); - $sync->{showpasswords} = 1 ; - ok(q{} eq command_line_nopassword(), 'command_line_nopassword void'); - ok('--blabla' eq command_line_nopassword('--blabla'), 'command_line_nopassword --blabla'); - #myprint( command_line_nopassword((qw{ --password1 secret1 })), "\n" ) ; - ok('--password1 secret1' eq command_line_nopassword(qw{ --password1 secret1}), 'command_line_nopassword --password1'); - ok('--blabla --password1 secret1 --blibli' - eq command_line_nopassword(qw{ --blabla --password1 secret1 --blibli }), 'command_line_nopassword --password1 --blibli'); +sub tests_get_stdin_masked +{ + note( 'Entering tests_get_stdin_masked()' ) ; - note( 'Leaving tests_command_line_nopassword()' ) ; + is( q{}, get_stdin_masked( ), 'get_stdin_masked: no args' ) ; + is( q{}, get_stdin_masked( 'Please ENTER: ' ), 'get_stdin_masked: ENTER' ) ; + + note( 'Leaving tests_get_stdin_masked()' ) ; return ; } -sub ask_for_password { - my ( $user, $host ) = @ARG ; - myprint( "What's the password for $user" . '@' . "$host? 
(not visible while you type, then enter RETURN) " ) ;
+#######################################################
+# The issue is that prompt() does not prompt the prompt
+# when the program is used like
+# { sleep 2 ; echo blablabla ; } | ./imapsync ...--host1 lo --user1 tata --host2 lo --user2 titi
+
+# use IO::Prompter ;
+sub get_stdin_masked
+{
+ my $prompt = shift || 'Say something: ' ;
+ local @ARGV = () ;
+ my $input = prompt(
+ -prompt => $prompt,
+ -echo => '*',
+ ) ;
+ #myprint( "You said: $input\n" ) ;
+ return $input ;
+}
+
+sub ask_for_password_new
+{
+ my $prompt = shift ;
+ my $password = get_stdin_masked( $prompt ) ;
+ return $password ;
+}
+#########################################################
+
+
+sub ask_for_password
+{
+ my $prompt = shift ;
+ myprint( $prompt ) ;
 Term::ReadKey::ReadMode( 2 ) ;
+ ## no critic (InputOutput::ProhibitExplicitStdin)
 my $password = <STDIN> ;
 chomp $password ;
 myprint( "\nGot it\n" ) ;
@@ -3799,34 +4552,38 @@ sub ask_for_password {
 # Have to refactor get_password1() get_password2()
 # to have only get_password() and two calls
-sub get_password1 {
+sub get_password1
+{
 my $mysync = shift ;
 $mysync->{password1}
- || $passfile1
+ || $mysync->{ passfile1 }
 || 'PREAUTH' eq $authmech1
 || 'EXTERNAL' eq $authmech1
 || $ENV{IMAPSYNC_PASSWORD1}
- || do {
- myprint( << 'FIN_PASSFILE' ) ;
+ || do
+ {
+ myprint( << 'FIN_PASSFILE' ) ;
 If you are afraid of giving password on the command line arguments, you can put the password of user1 in a file named file1 and use "--passfile1 file1" instead of typing it.
 Then give this file restrictive permissions with the command "chmod 600 file1".
 An other solution is to set the environment variable IMAPSYNC_PASSWORD1
 FIN_PASSFILE
+ my $user = $authuser1 || $mysync->{user1} ;
+ my $host = $mysync->{host1} ;
+ my $prompt = "What's the password for $user" . ' at ' . "$host? (not visible while you type, then enter RETURN) " ;
+ $mysync->{password1} = ask_for_password( $prompt ) ;
+ } ;
- $mysync->{password1} = ask_for_password( $authuser1 || $mysync->{user1}, $mysync->{host1} ) ;
- } ;
-
-
- if ( defined $passfile1 ) {
- if ( ! -e -r $passfile1 ) {
- myprint( "Failure: file from parameter --passfile1 $passfile1 does not exist or is not readable\n" ) ;
+ if ( defined $mysync->{ passfile1 } ) {
+ if ( ! -e -r $mysync->{ passfile1 } ) {
+ myprint( "Failure: file from parameter --passfile1 $mysync->{ passfile1 } does not exist or is not readable\n" ) ;
 exit_clean( $mysync, $EX_NOINPUT ) ;
 }
 # passfile1 readable
- $mysync->{password1} = firstline ( $passfile1 ) ;
+ $mysync->{password1} = firstline ( $mysync->{ passfile1 } ) ;
 return ;
 }
 if ( $ENV{IMAPSYNC_PASSWORD1} ) {
@@ -3836,35 +4593,39 @@ FIN_PASSFILE
 return ;
 }
-sub get_password2 {
+sub get_password2
+{
 my $mysync = shift ;
 $mysync->{password2}
- || $passfile2
+ || $mysync->{ passfile2 }
 || 'PREAUTH' eq $authmech2
 || 'EXTERNAL' eq $authmech2
 || $ENV{IMAPSYNC_PASSWORD2}
- || do {
- myprint( << 'FIN_PASSFILE' ) ;
+ || do
+ {
+ myprint( << 'FIN_PASSFILE' ) ;
 If you are afraid of giving password on the command line arguments, you can put the password of user2 in a file named file2 and use "--passfile2 file2" instead of typing it.
 Then give this file restrictive permissions with the command "chmod 600 file2".
An other solution is to set the environment variable IMAPSYNC_PASSWORD2 FIN_PASSFILE - - $mysync->{password2} = ask_for_password( $authuser2 || $mysync->{user2}, $mysync->{host2} ) ; - } ; + my $user = $authuser2 || $mysync->{user2} ; + my $host = $mysync->{host2} ; + my $prompt = "What's the password for $user" . ' at ' . "$host? (not visible while you type, then enter RETURN) " ; + $mysync->{password2} = ask_for_password( $prompt ) ; + } ; - if ( defined $passfile2 ) { - if ( ! -e -r $passfile2 ) { - myprint( "Failure: file from parameter --passfile2 $passfile2 does not exist or is not readable\n" ) ; + if ( defined $mysync->{ passfile2 } ) { + if ( ! -e -r $mysync->{ passfile2 } ) { + myprint( "Failure: file from parameter --passfile2 $mysync->{ passfile2 } does not exist or is not readable\n" ) ; exit_clean( $mysync, $EX_NOINPUT ) ; } # passfile2 readable - $mysync->{password2} = firstline ( $passfile2 ) ; + $mysync->{password2} = firstline ( $mysync->{ passfile2 } ) ; return ; } if ( $ENV{IMAPSYNC_PASSWORD2} ) { @@ -3876,63 +4637,169 @@ FIN_PASSFILE -sub catch_ignore { + +sub remove_tmp_files +{ + my $mysync = shift or return ; + $mysync->{pidfile} or return ; + if ( -e $mysync->{pidfile} ) { + unlink $mysync->{pidfile} ; + } + return ; +} + +sub cleanup_before_exit +{ + my $mysync = shift ; + remove_tmp_files( $mysync ) ; + if ( $mysync->{imap1} and $mysync->{imap1}->IsConnected() ) + { + myprint( "Disconnecting from host1 $mysync->{ host1 } user1 $mysync->{ user1 }\n" ) ; + $mysync->{imap1}->logout( ) ; + } + if ( $mysync->{imap2} and $mysync->{imap2}->IsConnected() ) + { + myprint( "Disconnecting from host2 $mysync->{ host2 } user2 $mysync->{ user2 }\n" ) ; + $mysync->{imap2}->logout( ) ; + } + if ( $mysync->{log} ) { + myprint( "Log file is $mysync->{logfile} ( to change it, use --logfile filepath ; or use --nolog to turn off logging )\n" ) ; + } + if ( $mysync->{log} and $mysync->{logfile_handle} ) { + #myprint( "Closing $mysync->{ logfile }\n" ) ; + close $mysync->{logfile_handle} ; + } + return ; +} + + + +sub exit_clean +{ + my $mysync = shift @ARG ; + my $status = shift @ARG ; + my @messages = @ARG ; + if ( @messages ) + { + myprint( @messages ) ; + } + myprint( "Exiting with return value $status\n" ) ; + cleanup_before_exit( $mysync ) ; + + exit $status ; +} + +sub missing_option +{ + my $mysync = shift ; + my $option = shift ; + exit_clean( $mysync, $EX_USAGE, "$option option is mandatory, for help run $PROGRAM_NAME --help\n" ) ; + return ; +} + + +sub catch_ignore +{ my $mysync = shift ; my $signame = shift ; - + my $sigcounter = ++$mysync->{ sigcounter }{ $signame } ; - myprint( "\nGot a signal $signame (my PID is $PROCESS_ID). Received $sigcounter $signame signals so far. Thanks!\n" ) ; + myprint( "\nGot a signal $signame (my PID is $PROCESS_ID my PPID is ", getppid( ), + "). Received $sigcounter $signame signals so far. Thanks!\n" ) ; stats( $mysync ) ; return ; } -sub catch_exit { +sub catch_exit +{ my $mysync = shift ; my $signame = shift || q{} ; if ( $signame ) { - myprint( "\nGot a signal $signame (my PID is $PROCESS_ID). Asked to terminate\n" ) ; + myprint( "\nGot a signal $signame (my PID is $PROCESS_ID my PPID is ", getppid( ), + "). Asked to terminate\n" ) ; + if ( $mysync->{stats} ) { + myprint( "Here are the final stats of this sync not completely finished so far\n" ) ; + stats( $mysync ) ; + myprint( "Ended by a signal $signame (my PID is $PROCESS_ID my PPID is ", + getppid( ), "). 
I am asked to terminate immediately.\n" ) ; + myprint( "You should resynchronize those accounts by running a sync again,\n", + "since some messages and entire folders might still be missing on host2.\n" ) ; + } + ## no critic (RequireLocalizedPunctuationVars) + $SIG{ $signame } = 'DEFAULT'; # restore default action + # kill myself with $signame + # https://www.cons.org/cracauer/sigint.html + myprint( "Killing myself with signal $signame\n" ) ; + cleanup_before_exit( $mysync ) ; + kill( $signame, $PROCESS_ID ) ; + } + else + { + exit_clean( $mysync, $EXIT_BY_SIGNAL ) ; } - myprint( "Here are the final stats of this sync not completely finished so far\n" ) ; - stats( $mysync ) ; - myprint( "Ended by a signal $signame (my PID is $PROCESS_ID). I am asked to terminate immediately.\n" ) ; - myprint( "You should resynchronize those accounts by running a sync again,\n", - "since some messages and entire folders might still be missing on host2.\n" ) ; - exit_clean( $mysync, $EXIT_BY_SIGNAL ) ; return ; } -sub catch_reconnect { + +sub catch_print +{ my $mysync = shift ; my $signame = shift ; - myprint( "\nGot a signal $signame (my PID is $PROCESS_ID)\n", - "Hit 2 ctr-c within 2 seconds to exit the program\n", - "Hit only 1 ctr-c to reconnect to both imap servers\n", - ) ; - if ( here_twice( $mysync ) ) { - myprint( "Got two signals $signame within $INTERVAL_TO_EXIT seconds. Exiting...\n" ) ; - catch_exit( $mysync ) ; - }else{ - myprint( "For now only one signal $signame within $INTERVAL_TO_EXIT seconds.\n" ) ; - } - if ( ! defined $mysync->{imap1} ) { return ; } - if ( ! defined $mysync->{imap2} ) { return ; } - - - myprint( "Info: reconnecting to host1 imap server\n" ) ; - $mysync->{imap1}->State( Mail::IMAPClient::Unconnected ) ; - $mysync->{imap1}->{IMAPSYNC_RECONNECT_COUNT} += 1 ; - $mysync->{imap1}->reconnect( ) ; - myprint( "Info: reconnecting to host2 imap server\n" ) ; - $mysync->{imap2}->State( Mail::IMAPClient::Unconnected ) ; - $mysync->{imap2}->{IMAPSYNC_RECONNECT_COUNT} += 1 ; - $mysync->{imap2}->reconnect( ) ; - myprint( "Info: reconnected to both imap servers\n" ) ; + my $sigcounter = ++$mysync->{ sigcounter }{ $signame } ; + myprint( "\nGot a signal $signame (my PID is $PROCESS_ID my PPID is ", getppid( ), + "). Received $sigcounter $signame signals so far. Thanks!\n" ) ; return ; } -sub tests_reconnect_12_if_needed { + +sub catch_reconnect +{ + my $mysync = shift ; + my $signame = shift ; + if ( here_twice( $mysync ) ) { + myprint( "Got two signals $signame within $INTERVAL_TO_EXIT seconds. Exiting...\n" ) ; + catch_exit( $mysync, $signame ) ; + }else{ + myprint( "\nGot a signal $signame (my PID is $PROCESS_ID my PPID is ", getppid( ), ")\n", + "Hit 2 ctr-c within 2 seconds to exit the program\n", + "Hit only 1 ctr-c to reconnect to both imap servers\n", + ) ; + myprint( "For now only one signal $signame within $INTERVAL_TO_EXIT seconds.\n" ) ; + + if ( ! defined $mysync->{imap1} ) { return ; } + if ( ! 
defined $mysync->{imap2} ) { return ; } + + myprint( "Info: reconnecting to host1 imap server $mysync->{host1}\n" ) ; + $mysync->{imap1}->State( Mail::IMAPClient::Unconnected ) ; + $mysync->{imap1}->{IMAPSYNC_RECONNECT_COUNT} += 1 ; + if ( $mysync->{imap1}->reconnect( ) ) + { + myprint( "Info: reconnected to host1 imap server $mysync->{host1}\n" ) ; + } + else + { + exit_clean( $mysync, $EXIT_CONNECTION_FAILURE ) ; + } + myprint( "Info: reconnecting to host2 imap server\n" ) ; + $mysync->{imap2}->State( Mail::IMAPClient::Unconnected ) ; + $mysync->{imap2}->{IMAPSYNC_RECONNECT_COUNT} += 1 ; + if ( $mysync->{imap2}->reconnect( ) ) + { + myprint( "Info: reconnected to host2 imap server $mysync->{host2}\n" ) ; + } + else + { + exit_clean( $mysync, $EXIT_CONNECTION_FAILURE ) ; + } + myprint( "Info: reconnected to both imap servers\n" ) ; + } + return ; +} + +sub tests_reconnect_12_if_needed +{ note( 'Entering tests_reconnect_12_if_needed()' ) ; my $mysync ; @@ -3949,7 +4816,8 @@ sub tests_reconnect_12_if_needed { return ; } -sub reconnect_12_if_needed { +sub reconnect_12_if_needed +{ my $mysync = shift ; #return 2 ; if ( ! reconnect_if_needed( $mysync->{imap1} ) ) { @@ -3963,7 +4831,8 @@ sub reconnect_12_if_needed { } -sub tests_reconnect_if_needed { +sub tests_reconnect_if_needed +{ note( 'Entering tests_reconnect_if_needed()' ) ; @@ -3983,7 +4852,8 @@ sub tests_reconnect_if_needed { return ; } -sub reconnect_if_needed { +sub reconnect_if_needed +{ # return undef upon failure. # return 1 upon connection success, with or without reconnection. @@ -4015,7 +4885,8 @@ sub reconnect_if_needed { -sub here_twice { +sub here_twice +{ my $mysync = shift ; my $now = time ; my $previous = $mysync->{lastcatch} || 0 ; @@ -4029,35 +4900,42 @@ sub here_twice { } -sub justconnect { - - $imap1 = connect_imap( $sync->{host1}, $sync->{port1}, $debugimap1, $sync->{ssl1}, $sync->{tls1}, 'Host1', $sync->{h1}->{timeout}, $sync->{h1} ) ; - $imap2 = connect_imap( $sync->{host2}, $sync->{port2}, $debugimap2, $sync->{ssl2}, $sync->{tls2}, 'Host2', $sync->{h2}->{timeout}, $sync->{h2} ) ; - $imap1->logout( ) ; - $imap2->logout( ) ; +sub justconnect +{ + my $mysync = shift ; + $mysync->{imap1} = connect_imap( $mysync->{host1}, $mysync->{port1}, $debugimap1, + $mysync->{ssl1}, $mysync->{tls1}, 'Host1', $mysync->{h1}->{timeout}, $mysync->{h1} ) ; + $mysync->{imap2} = connect_imap( $mysync->{host2}, $mysync->{port2}, $debugimap2, + $mysync->{ssl2}, $mysync->{tls2}, 'Host2', $mysync->{h2}->{timeout}, $mysync->{h2} ) ; + $mysync->{imap1}->logout( ) ; + $mysync->{imap2}->logout( ) ; return ; } +sub skip_macosx +{ + return ; + return( 'macosx.polarhome.com' eq hostname() ) ; +} -sub tests_mailimapclient_connect { +sub tests_mailimapclient_connect +{ note( 'Entering tests_mailimapclient_connect()' ) ; + my $imap ; - # ipv4 + # ipv4 ok( $imap = Mail::IMAPClient->new( ), 'mailimapclient_connect ipv4: new' ) ; is( 'Mail::IMAPClient', ref( $imap ), 'mailimapclient_connect ipv4: ref is Mail::IMAPClient' ) ; - SKIP: { - if ( 'macosx' eq hostname() - or 'macosx.polarhome.com' eq hostname() - ) { skip( 'Tests avoided on macosx get stuck', 1 ) ; } - is( undef, $imap->connect( ), 'mailimapclient_connect ipv4: connect with no server => failure' ) ; - } + # Mail::IMAPClient 3.40 die on this... 
So we skip it, thanks to "mature" IO::Socket::IP + # is( undef, $imap->connect( ), 'mailimapclient_connect ipv4: connect with no server => failure' ) ; + is( 'test.lamiral.info', $imap->Server( 'test.lamiral.info' ), 'mailimapclient_connect ipv4: setting Server(test.lamiral.info)' ) ; is( 1, $imap->Debug( 1 ), 'mailimapclient_connect ipv4: setting Debug( 1 )' ) ; is( 143, $imap->Port( 143 ), 'mailimapclient_connect ipv4: setting Port( 143 )' ) ; is( 3, $imap->Timeout( 3 ), 'mailimapclient_connect ipv4: setting Timout( 30 )' ) ; - like( ref( $imap->connect( ) ), qr/IO::Socket::INET/, 'mailimapclient_connect ipv4: connect to test.lamiral.info' ) ; + like( ref( $imap->connect( ) ), qr/IO::Socket::INET|IO::Socket::IP/, 'mailimapclient_connect ipv4: connect to test.lamiral.info' ) ; like( $imap->logout( ), qr/Mail::IMAPClient/, 'mailimapclient_connect ipv4: logout' ) ; is( undef, undef $imap, 'mailimapclient_connect ipv4: free variable' ) ; @@ -4070,14 +4948,23 @@ sub tests_mailimapclient_connect { like( ref( $imap->connect( ) ), qr/IO::Socket::SSL/, 'mailimapclient_connect ipv4 + ssl: connect to test.lamiral.info' ) ; is( $imap->logout( ), undef, 'mailimapclient_connect ipv4 + ssl: logout in ssl causes failure' ) ; is( undef, undef $imap, 'mailimapclient_connect ipv4 + ssl: free variable' ) ; - + # ipv6 + ssl ok( $imap = Mail::IMAPClient->new( ), 'mailimapclient_connect ipv6 + ssl: new' ) ; is( 'ks2ipv6.lamiral.info', $imap->Server( 'ks2ipv6.lamiral.info' ), 'mailimapclient_connect ipv6 + ssl: setting Server(ks2ipv6.lamiral.info)' ) ; ok( $imap->Ssl( [ SSL_verify_mode => SSL_VERIFY_NONE ] ), 'mailimapclient_connect ipv6 + ssl: setting Ssl( SSL_VERIFY_NONE )' ) ; is( 993, $imap->Port( 993 ), 'mailimapclient_connect ipv6 + ssl: setting Port( 993 )' ) ; SKIP: { - if ( 'CUILLERE' eq hostname() ) { skip( 'Tests avoided on CUILLERE can not do ipv6', 2 ) ; } + if ( + 'CUILLERE' eq hostname() + or + skip_macosx() + or + -e '/.dockerenv' + ) + { + skip( 'Tests avoided on CUILLERE can not do ipv6', 2 ) ; + } like( ref( $imap->connect( ) ), qr/IO::Socket::SSL/, 'mailimapclient_connect ipv6 + ssl: connect to ks2ipv6.lamiral.info' ) ; is( $imap->logout( ), undef, 'mailimapclient_connect ipv6 + ssl: logout in ssl causes failure' ) ; } @@ -4088,87 +4975,103 @@ sub tests_mailimapclient_connect { return ; } -sub tests_mailimapclient_connect_bug { + +sub tests_mailimapclient_connect_bug +{ note( 'Entering tests_mailimapclient_connect_bug()' ) ; + my $imap ; # ipv6 - ok( $imap = Mail::IMAPClient->new( ), 'mailimapclient_connect ipv6: new' ) ; - is( 'ks2ipv6.lamiral.info', $imap->Server( 'ks2ipv6.lamiral.info' ), 'mailimapclient_connect ipv6: setting Server(ks2ipv6.lamiral.info)' ) ; - is( 143, $imap->Port( 143 ), 'mailimapclient_connect ipv6: setting Port( 993 )' ) ; - + ok( $imap = Mail::IMAPClient->new( ), 'mailimapclient_connect_bug ipv6: new' ) ; + is( 'ks2ipv6.lamiral.info', $imap->Server( 'ks2ipv6.lamiral.info' ), 'mailimapclient_connect_bug ipv6: setting Server(ks2ipv6.lamiral.info)' ) ; + is( 143, $imap->Port( 143 ), 'mailimapclient_connect_bug ipv6: setting Port( 993 )' ) ; + SKIP: { - if ( 'CUILLERE' eq hostname() ) { skip( 'Tests avoided on CUILLERE can not do ipv6', 1 ) ; } - like( ref( $imap->connect( ) ), qr/IO::Socket::INET/, 'mailimapclient_connect ipv6: connect to ks2ipv6.lamiral.info' ) - or diag( 'mailimapclient_connect ipv6: ', $imap->LastError( ), $!, ) ; + if ( + 'CUILLERE' eq hostname() + or + skip_macosx() + or + -e '/.dockerenv' + ) + { + skip( 'Tests avoided on CUILLERE can not do 
ipv6', 1 ) ; + } + like( ref( $imap->connect( ) ), qr/IO::Socket::INET/, 'mailimapclient_connect_bug ipv6: connect to ks2ipv6.lamiral.info' ) + or diag( 'mailimapclient_connect_bug ipv6: ', $imap->LastError( ), $!, ) ; } - #is( $imap->logout( ), undef, 'mailimapclient_connect ipv6: logout in ssl causes failure' ) ; - is( undef, undef $imap, 'mailimapclient_connect ipv6: free variable' ) ; + #is( $imap->logout( ), undef, 'mailimapclient_connect_bug ipv6: logout in ssl causes failure' ) ; + is( undef, undef $imap, 'mailimapclient_connect_bug ipv6: free variable' ) ; note( 'Leaving tests_mailimapclient_connect_bug()' ) ; return ; } -sub mailimapclient_connect { - return ; -} - - - -sub tests_connect_socket { +sub tests_connect_socket +{ note( 'Entering tests_connect_socket()' ) ; - + is( undef, connect_socket( ), 'connect_socket: no args' ) ; my $socket ; my $imap ; SKIP: { - if ( 'CUILLERE' eq hostname() ) { skip( 'Tests avoided on CUILLERE cannot do ipv6', 2 ) ; } - - $socket = IO::Socket::INET6->new( - PeerAddr => 'ks2ipv6.lamiral.info', - PeerPort => 143, - ) ; + if ( + 'CUILLERE' eq hostname() + or + skip_macosx() + or + -e '/.dockerenv' + ) + { + skip( 'Tests avoided on CUILLERE/macosx.polarhome.com/docker cannot do ipv6', 2 ) ; + } - - ok( $imap = connect_socket( $socket ), 'connect_socket: ks2ipv6.lamiral.info port 143 IO::Socket::INET6' ) ; - #$imap->Debug( 1 ) ; - # myprint( $imap->capability( ) ) ; - if ( $imap ) { - $imap->logout( ) ; - } - - #$IO::Socket::SSL::DEBUG = 4 ; - $socket = IO::Socket::SSL->new( - PeerHost => 'ks2ipv6.lamiral.info', - PeerPort => 993, - SSL_verify_mode => SSL_VERIFY_NONE, - ) ; - # myprint $socket ; - ok( $imap = connect_socket( $socket ), 'connect_socket: ks2ipv6.lamiral.info port 993 IO::Socket::SSL' ) ; - #$imap->Debug( 1 ) ; - # myprint $imap->capability( ) ; - $socket->close( ) ; - if ( $imap ) { - $socket->close( ) ; - } - #$socket->close(SSL_no_shutdown => 1) ; - #$imap->logout( ) ; - #myprint "\n" ; - #$imap->logout( ) ; - } + $socket = IO::Socket::INET6->new( + PeerAddr => 'ks2ipv6.lamiral.info', + PeerPort => 143, + ) ; + + ok( $imap = connect_socket( $socket ), 'connect_socket: ks2ipv6.lamiral.info port 143 IO::Socket::INET6' ) ; + #$imap->Debug( 1 ) ; + # myprint( $imap->capability( ) ) ; + if ( $imap ) { + $imap->logout( ) ; + } + + #$IO::Socket::SSL::DEBUG = 4 ; + $socket = IO::Socket::SSL->new( + PeerHost => 'ks2ipv6.lamiral.info', + PeerPort => 993, + SSL_verify_mode => SSL_VERIFY_NONE, + ) ; + # myprint( $socket ) ; + ok( $imap = connect_socket( $socket ), 'connect_socket: ks2ipv6.lamiral.info port 993 IO::Socket::SSL' ) ; + #$imap->Debug( 1 ) ; + # myprint( $imap->capability( ) ) ; + # $socket->close( ) ; + if ( $imap ) { + $socket->close( ) ; + } + #$socket->close(SSL_no_shutdown => 1) ; + #$imap->logout( ) ; + #myprint( "\n" ) ; + #$imap->logout( ) ; + } note( 'Leaving tests_connect_socket()' ) ; return ; } -sub connect_socket { +sub connect_socket +{ my( $socket ) = @ARG ; if ( ! 
defined $socket ) { return ; } - + my $host = $socket->peerhost( ) ; my $port = $socket->peerport( ) ; #print "socket->peerhost: ", $socket->peerhost( ), "\n" ; @@ -4181,37 +5084,48 @@ sub connect_socket { } -sub tests_probe_imapssl { +sub tests_probe_imapssl +{ note( 'Entering tests_probe_imapssl()' ) ; is( undef, probe_imapssl( ), 'probe_imapssl: no args => undef' ) ; is( undef, probe_imapssl( 'unknown' ), 'probe_imapssl: unknown => undef' ) ; SKIP: { - if ( 'CUILLERE' eq hostname() ) { skip( 'Tests avoided on CUILLERE cannot do ipv6', 1 ) ; } + if ( + 'CUILLERE' eq hostname() + or + skip_macosx() + or + -e '/.dockerenv' + ) + { + skip( 'Tests avoided on CUILLERE/macosx.polarhome.com/docker cannot do ipv6', 2 ) ; + } like( probe_imapssl( 'ks2ipv6.lamiral.info' ), qr/^\* OK/, 'probe_imapssl: ks2ipv6.lamiral.info matches "* OK"' ) ; + like( probe_imapssl( 'imap.gmail.com' ), qr/^\* OK/, 'probe_imapssl: imap.gmail.com matches "* OK"' ) ; } ; like( probe_imapssl( 'test1.lamiral.info' ), qr/^\* OK/, 'probe_imapssl: test1.lamiral.info matches "* OK"' ) ; - like( probe_imapssl( 'imap.gmail.com' ), qr/^\* OK/, 'probe_imapssl: imap.gmail.com matches "* OK"' ) ; note( 'Leaving tests_probe_imapssl()' ) ; return ; } -sub probe_imapssl { +sub probe_imapssl +{ my $host = shift ; - + if ( ! $host ) { return ; } - - my $socket = IO::Socket::SSL->new( + + my $socket = IO::Socket::SSL->new( PeerHost => $host, PeerPort => $IMAP_SSL_PORT, SSL_verify_mode => SSL_VERIFY_NONE, ) ; #print "$socket\n" ; if ( ! $socket ) { return ; } - + my $banner ; $socket->sysread( $banner, 65_536 ) ; #print "$banner" ; @@ -4220,7 +5134,8 @@ sub probe_imapssl { } -sub connect_imap { +sub connect_imap +{ my( $host, $port, $mydebugimap, $ssl, $tls, $Side, $mytimeout, $h ) = @_ ; my $imap = Mail::IMAPClient->new( ) ; if ( $ssl ) { set_ssl( $imap, $h ) } @@ -4233,7 +5148,7 @@ sub connect_imap { myprint( "$Side: connecting on $side [$host] port [$port]\n" ) ; $imap->connect( ) - or die_clean( "$Side: Can not open imap connection on [$host]: " . $imap->LastError . " $OS_ERROR\n" ) ; + or exit_clean( $sync, $EXIT_CONNECTION_FAILURE, "$Side: Can not open imap connection on [$host]: " . $imap->LastError . " $OS_ERROR\n" ) ; myprint( "$Side IP address: ", $imap->Socket->peerhost(), "\n" ) ; my $banner = $imap->Results()->[0] ; @@ -4243,14 +5158,15 @@ sub connect_imap { if ( $tls ) { set_tls( $imap, $h ) ; $imap->starttls( ) - or die_clean("$Side: Can not go to tls encryption on $side [$host]:", $imap->LastError, "\n" ) ; + or exit_clean( $sync, $EXIT_TLS_FAILURE, "$Side: Can not go to tls encryption on $side [$host]:", $imap->LastError, "\n" ) ; myprint( "$Side: Socket successfuly converted to SSL\n" ) ; } return( $imap ) ; } -sub login_imap { +sub login_imap +{ my @allargs = @_ ; my( @@ -4265,7 +5181,7 @@ sub login_imap { my $imap = init_imap( @allargs ) ; $imap->connect() - or die_clean("$Side failure: can not open imap connection on $side [$host] with user [$user]: " . $imap->LastError . " $OS_ERROR\n" ) ; + or exit_clean( $mysync, $EXIT_CONNECTION_FAILURE, "$Side failure: can not open imap connection on $side [$host] with user [$user]: " . $imap->LastError . 
" $OS_ERROR\n" ) ; myprint( "$Side IP address: ", $imap->Socket->peerhost(), "\n" ) ; my $banner = $imap->Results()->[0] ; @@ -4282,14 +5198,14 @@ sub login_imap { $imap->Socket ; myprintf("%s: Assuming PREAUTH for %s\n", $Side, $imap->Server ) ; }else{ - die_clean( "$Side failure: error login on $side [$host] with user [$user] auth [PREAUTH]" ) ; + exit_clean( $mysync, $EXIT_AUTHENTICATION_FAILURE, "$Side failure: error login on $side [$host] with user [$user] auth [PREAUTH]" ) ; } } if ( $tls ) { set_tls( $imap, $h ) ; $imap->starttls( ) - or die_clean("$Side failure: Can not go to tls encryption on $side [$host]:", $imap->LastError, "\n" ) ; + or exit_clean( $mysync, $EXIT_TLS_FAILURE, "$Side failure: Can not go to tls encryption on $side [$host]:", $imap->LastError, "\n" ) ; myprint( "$Side: Socket successfuly converted to SSL\n" ) ; } @@ -4300,46 +5216,52 @@ sub login_imap { } -sub authenticate_imap { - - my($imap, +sub authenticate_imap +{ + my( $imap, $host, $port, $user, $domain, $password, $mydebugimap, $mytimeout, $fastio, $ssl, $tls, $authmech, $authuser, $reconnectretry, $proxyauth, $uid, $split, $Side, $h, $mysync ) = @_ ; check_capability( $imap, $authmech, $Side ) ; + $imap->User( $user ) ; + $imap->Domain( $domain ) if ( defined $domain ) ; + $imap->Authuser( $authuser ) ; + $imap->Password( $password ) ; + + if ( 'X-MASTERAUTH' eq $authmech ) + { + xmasterauth( $imap ) ; + return ; + } if ( $proxyauth ) { $imap->Authmechanism(q{}) ; - $imap->User($authuser) ; + $imap->User( $authuser ) ; } else { $imap->Authmechanism( $authmech ) unless ( $authmech eq 'LOGIN' or $authmech eq 'PREAUTH' ) ; - $imap->User($user) ; } $imap->Authcallback(\&xoauth) if ( 'XOAUTH' eq $authmech ) ; $imap->Authcallback(\&xoauth2) if ( 'XOAUTH2' eq $authmech ) ; $imap->Authcallback(\&plainauth) if ( ( 'PLAIN' eq $authmech ) or ( 'EXTERNAL' eq $authmech ) ) ; - $imap->Domain($domain) if (defined $domain) ; - $imap->Authuser($authuser) ; - $imap->Password($password) ; - unless ( $authmech eq 'PREAUTH' or $imap->login( ) ) { + unless ( $authmech eq 'PREAUTH' or $authmech eq 'X-MASTERAUTH' or $imap->login( ) ) { my $info = "$Side failure: Error login on [$host] with user [$user] auth" ; my $einfo = $imap->LastError || @{$imap->History}[$LAST] ; chomp $einfo ; my $error = "$info [$authmech]: $einfo\n" ; if ( $authmech eq 'LOGIN' or $imap->IsUnconnected( ) or $authuser ) { - die_clean( $error ) ; + exit_clean( $mysync, $EXIT_AUTHENTICATION_FAILURE, $error ) ; }else{ - myprint( $error ) ; + myprint( $error ) ; } myprint( "$Side info: trying LOGIN Auth mechanism on [$host] with user [$user]\n" ) ; $imap->Authmechanism(q{}) ; $imap->login() or - die_clean("$info [LOGIN]: ", $imap->LastError, "\n") ; + exit_clean( $mysync, $EXIT_AUTHENTICATION_FAILURE, "$info [LOGIN]: ", $imap->LastError, "\n") ; } if ( $proxyauth ) { @@ -4347,43 +5269,48 @@ sub authenticate_imap { my $info = "$Side failure: Error doing proxyauth as user [$user] on [$host] using proxy-login as [$authuser]" ; my $einfo = $imap->LastError || @{$imap->History}[$LAST] ; chomp $einfo ; - die_clean( "$info: $einfo\n" ) ; + exit_clean( $mysync, $EXIT_AUTHENTICATION_FAILURE, "$info: $einfo\n" ) ; } } return ; } -sub check_capability { +sub check_capability +{ my( $imap, $authmech, $Side ) = @_ ; - if ($imap->has_capability( "AUTH=$authmech" ) - or $imap->has_capability( $authmech ) ) { + if ( $imap->has_capability( "AUTH=$authmech" ) + or $imap->has_capability( $authmech ) ) + { myprintf("%s: %s says it has CAPABILITY for AUTHENTICATE %s\n", - $Side, 
$imap->Server, $authmech) ; - return ; + $Side, $imap->Server, $authmech) ; + return ; } - if ( $authmech eq 'LOGIN' ) { - # Well, the warning is so common and useless that I prefer to remove it - # No more "... says it has NO CAPABILITY for AUTHENTICATE LOGIN" - return ; - } + if ( $authmech eq 'LOGIN' ) + { + # Well, the warning is so common and useless that I prefer to remove it + # No more "... says it has NO CAPABILITY for AUTHENTICATE LOGIN" + return ; + } - myprintf( "%s: %s says it has NO CAPABILITY for AUTHENTICATE %s\n", - $Side, $imap->Server, $authmech ) ; + myprintf( "%s: %s says it has NO CAPABILITY for AUTHENTICATE %s\n", + $Side, $imap->Server, $authmech ) ; - if ($authmech eq 'PLAIN') { - myprint( "$Side: frequently PLAIN is only supported with SSL, try --ssl or --tls options\n" ) ; + if ( $authmech eq 'PLAIN' ) + { + myprint( "$Side: frequently PLAIN is only supported with SSL, try --ssl or --tls options\n" ) ; } return ; } -sub set_ssl { +sub set_ssl +{ my ( $imap, $h ) = @_ ; # SSL_version can be # SSLv3 SSLv2 SSLv23 SSLv23:!SSLv2 (last one is the default in IO-Socket-SSL-1.953) @@ -4412,7 +5339,8 @@ sub set_ssl { return ; } -sub set_tls { +sub set_tls +{ my ( $imap, $h ) = @_ ; my $sslargs_hash = $h->{sslargs} ; @@ -4440,7 +5368,8 @@ sub set_tls { -sub init_imap { +sub init_imap +{ my( $host, $port, $user, $domain, $password, $mydebugimap, $mytimeout, $fastio, @@ -4478,7 +5407,8 @@ sub init_imap { } -sub plainauth { +sub plainauth +{ my $code = shift; my $imap = shift; @@ -4514,7 +5444,8 @@ sub plainauth { # # If the password arg ends in .json, it will assume this new json method, otherwise it # will fallback to the "oauth client id;.p12" format it was previously using. -sub xoauth2 { +sub xoauth2 +{ require JSON::WebToken ; require LWP::UserAgent ; require HTML::Entities ; @@ -4532,15 +5463,15 @@ sub xoauth2 { if( $imap->Password =~ /^(.*\.json)$/x ) { my $json = JSON->new( ) ; my $filename = $1; - $debug and myprint( "XOAUTH2 json file: $filename\n" ) ; - open( my $FILE, '<', $filename ) or die_clean( "error [$filename]: $OS_ERROR " ) ; + $sync->{ debug } and myprint( "XOAUTH2 json file: $filename\n" ) ; + open( my $FILE, '<', $filename ) or exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, "error [$filename]: $OS_ERROR " ) ; my $jsonfile = $json->decode( join q{}, <$FILE> ) ; close $FILE ; $iss = $jsonfile->{client_id}; $key = $jsonfile->{private_key}; - $debug and myprint( "Service account: $iss\n"); - $debug and myprint( "Private key:\n$key\n"); + $sync->{ debug } and myprint( "Service account: $iss\n"); + $sync->{ debug } and myprint( "Private key:\n$key\n"); } else { # Get iss (service account address), keyfile name, and keypassword if necessary @@ -4549,12 +5480,12 @@ sub xoauth2 { # Assume key password is google default if not provided $keypass = 'notasecret' if not $keypass; - $debug and myprint( "Service account: $iss\nKey file: $keyfile\nKey password: $keypass\n"); + $sync->{ debug } and myprint( "Service account: $iss\nKey file: $keyfile\nKey password: $keypass\n"); # Get private key from p12 file (would be better in perl...) 
$key = `openssl pkcs12 -in "$keyfile" -nodes -nocerts -passin pass:$keypass -nomacver`; - $debug and myprint( "Private key:\n$key\n"); + $sync->{ debug } and myprint( "Private key:\n$key\n"); } # Create jwt of oauth2 request @@ -4578,9 +5509,9 @@ sub xoauth2 { assertion => $jwt } ) ; unless( $response->is_success( ) ) { - die_clean( $response->code, "\n", $response->content, "\n" ) ; + exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, $response->code, "\n", $response->content, "\n" ) ; }else{ - $debug and myprint( $response->content ) ; + $sync->{ debug } and myprint( $response->content ) ; } # access_token in response is what we need @@ -4589,7 +5520,7 @@ sub xoauth2 { # format as oauth2 auth data my $xoauth2_string = encode_base64( 'user=' . $imap->User . "\1auth=Bearer " . $data->{access_token} . "\1\1", q{} ) ; - $debug and myprint( "XOAUTH2 String: $xoauth2_string\n"); + $sync->{ debug } and myprint( "XOAUTH2 String: $xoauth2_string\n"); return($xoauth2_string); } @@ -4597,7 +5528,8 @@ sub xoauth2 { # xoauth() thanks to Eduardo Bortoluzzi Junior -sub xoauth { +sub xoauth +{ require URI::Escape ; require Data::Uniqid ; @@ -4612,7 +5544,7 @@ sub xoauth { # For Google Apps, the consumer key is the primary domain # TODO: create a command line argument to define the consumer key my @user_parts = split /@/x, $imap->User ; - $debug and myprint( "XOAUTH: consumer key: $user_parts[1]\n" ) ; + $sync->{ debug } and myprint( "XOAUTH: consumer key: $user_parts[1]\n" ) ; # All the parameters needed to be signed on the XOAUTH my %hash = (); @@ -4637,7 +5569,7 @@ sub xoauth { } $base .= URI::Escape::uri_escape($baseparms); - $debug and myprint( "XOAUTH: base request to sign: $base\n" ) ; + $sync->{ debug } and myprint( "XOAUTH: base request to sign: $base\n" ) ; # Sign it with the consumer secret, informed on the command line (password) my $digest = hmac_sha1( $base, URI::Escape::uri_escape( $imap->Password ) . q{&} ) ; @@ -4662,31 +5594,91 @@ sub xoauth { $string .= $baseparms; - $debug and myprint( "XOAUTH: authentication string: $string\n" ) ; + $sync->{ debug } and myprint( "XOAUTH: authentication string: $string\n" ) ; # It must be base64 encoded return encode_base64("$string", q{}); } +sub xmasterauth +{ + # This is Kerio auth admin + # This code comes from + # https://github.com/imapsync/imapsync/pull/53/files -sub banner_imapsync { + my $imap = shift ; - my @argv = @_ ; + my $user = $imap->User( ) ; + my $password = $imap->Password( ) ; + my $authmech = 'X-MASTERAUTH' ; + + my @challenge = $imap->tag_and_run( $authmech, "+" ) ; + if ( not defined $challenge[0] ) + { + exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, "Failure authenticate with $authmech: ", $imap->LastError, "\n") ; + return ; # hahaha! + } + $sync->{ debug } and myprint( "X-MASTERAUTH challenge: [@challenge]\n" ) ; + + $challenge[1] =~ s/^\+ |^\s+|\s+$//g ; + $imap->_imap_command( { addcrlf => 1, addtag => 0, tag => $imap->Count }, md5_hex( $challenge[1] . $password ) ) + or exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, "Failure authenticate with $authmech: ", $imap->LastError, "\n") ; + + $imap->tag_and_run( 'X-SETUSER ' . 
$user ) + or exit_clean( $sync, $EXIT_AUTHENTICATION_FAILURE, "Failure authenticate with $authmech: ", "X-SETUSER ", $imap->LastError, "\n") ; + + $imap->State( Mail::IMAPClient::Authenticated ) ; + # I comment this state because "Selected" state is usually done by SELECT or EXAMINE imap commands + # $imap->State( Mail::IMAPClient::Selected ) ; + + return ; +} + + +sub tests_do_valid_directory +{ + note( 'Entering tests_do_valid_directory()' ) ; + + Readonly my $NB_UNIX_tests_do_valid_directory => 2 ; + SKIP: { + skip( 'Tests only for Unix', $NB_UNIX_tests_do_valid_directory ) if ( 'MSWin32' eq $OSNAME ) ; + ok( 1 == do_valid_directory( '.'), 'do_valid_directory: . good' ) ; + ok( 1 == do_valid_directory( './W/tmp/tests/valid/sub'), 'do_valid_directory: ./W/tmp/tests/valid/sub good' ) ; + } + Readonly my $NB_UNIX_tests_do_valid_directory_non_root => 2 ; + SKIP: { + skip( 'Tests only for Unix', $NB_UNIX_tests_do_valid_directory_non_root ) if ( 'MSWin32' eq $OSNAME or '0' eq $EFFECTIVE_USER_ID ) ; + diag( 'Error / not writable is on purpose' ) ; + ok( 0 == do_valid_directory( '/'), 'do_valid_directory: / bad' ) ; + diag( 'Error permission denied on /noway is on purpose' ) ; + ok( 0 == do_valid_directory( '/noway'), 'do_valid_directory: /noway bad' ) ; + } + + + note( 'Leaving tests_do_valid_directory()' ) ; + return ; +} + +sub banner_imapsync +{ + my $mysync = shift @ARG ; + my @argv = @ARG ; my $banner_imapsync = join q{}, q{$RCSfile: imapsync,v $ }, - q{$Revision: 1.882 $ }, - q{$Date: 2018/05/05 21:10:43 $ }, - "\n", - "Command line used:\n", - "$PROGRAM_NAME ", command_line_nopassword( @argv ), "\n" ; + q{$Revision: 1.937 $ }, + q{$Date: 2019/05/01 22:14:00 $ }, + "\n", + "Command line used, run by $EXECUTABLE_NAME:\n", + "$PROGRAM_NAME ", command_line_nopassword( $mysync, @argv ), "\n" ; return( $banner_imapsync ) ; } -sub do_valid_directory { - my $dir = shift; +sub do_valid_directory +{ + my $dir = shift @ARG ; # all good => return ok. return( 1 ) if ( -d $dir and -r _ and -w _ ) ; @@ -4711,31 +5703,11 @@ sub do_valid_directory { return( 0 ) ; } -sub tests_do_valid_directory { - note( 'Entering tests_do_valid_directory()' ) ; - Readonly my $NB_UNIX_tests_do_valid_directory => 2 ; - SKIP: { - skip( 'Tests only for Unix', $NB_UNIX_tests_do_valid_directory ) if ( 'MSWin32' eq $OSNAME ) ; - ok( 1 == do_valid_directory( '.'), 'do_valid_directory: . 
good' ) ; - ok( 1 == do_valid_directory( './W/tmp/tests/valid/sub'), 'do_valid_directory: ./W/tmp/tests/valid/sub good' ) ; - } - Readonly my $NB_UNIX_tests_do_valid_directory_non_root => 2 ; - SKIP: { - skip( 'Tests only for Unix', $NB_UNIX_tests_do_valid_directory_non_root ) if ( 'MSWin32' eq $OSNAME or '0' eq $EFFECTIVE_USER_ID ) ; - diag( 'Error / not writable is on purpose' ) ; - ok( 0 == do_valid_directory( '/'), 'do_valid_directory: / bad' ) ; - diag( 'Error permission denied on /noway is on purpose' ) ; - ok( 0 == do_valid_directory( '/noway'), 'do_valid_directory: /noway bad' ) ; - } - - - note( 'Leaving tests_do_valid_directory()' ) ; - return ; -} +sub tests_match_a_pid_number +{ + note( 'Entering tests_match_a_pid_number()' ) ; - -sub tests_match_a_pid_number { is( undef, match_a_pid_number( ), 'match_a_pid_number: no args => undef' ) ; is( undef, match_a_pid_number( '' ), 'match_a_pid_number: "" => undef' ) ; is( undef, match_a_pid_number( 'lalala' ), 'match_a_pid_number: lalala => undef' ) ; @@ -4743,23 +5715,31 @@ sub tests_match_a_pid_number { is( 1, match_a_pid_number( 123 ), 'match_a_pid_number: 123 => 1' ) ; is( 1, match_a_pid_number( '123' ), 'match_a_pid_number: "123" => 1' ) ; is( undef, match_a_pid_number( 'a123' ), 'match_a_pid_number: a123 => undef' ) ; - is( 1, match_a_pid_number( 65535 ), 'match_a_pid_number: 65535 => 1' ) ; + is( 1, match_a_pid_number( 99999 ), 'match_a_pid_number: 99999 => 1' ) ; is( undef, match_a_pid_number( 0 ), 'match_a_pid_number: 0 => undef' ) ; - is( undef, match_a_pid_number( 65536 ), 'match_a_pid_number: 65536 => undef' ) ; - is( undef, match_a_pid_number( 99999 ), 'match_a_pid_number: 99999 => undef' ) ; + is( undef, match_a_pid_number( 100000 ), 'match_a_pid_number: 100000 => undef' ) ; + is( undef, match_a_pid_number( 123456 ), 'match_a_pid_number: 123456 => undef' ) ; + + note( 'Leaving tests_match_a_pid_number()' ) ; return ; } -sub match_a_pid_number { - my $pid = shift ; +sub match_a_pid_number +{ + my $pid = shift @ARG ; if ( ! $pid ) { return ; } if ( ! match( $pid, '^\d+$' ) ) { return ; } if ( 0 > $pid ) { return ; } - if ( 65535 < $pid ) { return ; } + #if ( 65535 < $pid ) { return ; } + if ( 99999 < $pid ) { return ; } return 1 ; } -sub tests_remove_pidfile_not_running { +sub tests_remove_pidfile_not_running +{ + note( 'Entering tests_remove_pidfile_not_running()' ) ; + + ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'remove_pidfile_not_running: mkpath W/tmp/tests/' ) ; is( undef, remove_pidfile_not_running( ), 'remove_pidfile_not_running: no args => undef' ) ; is( undef, remove_pidfile_not_running( './W' ), 'remove_pidfile_not_running: a dir => undef' ) ; is( undef, remove_pidfile_not_running( 'noexists' ), 'remove_pidfile_not_running: noexists => undef' ) ; @@ -4771,24 +5751,26 @@ sub tests_remove_pidfile_not_running { is( 1, remove_pidfile_not_running( 'W/tmp/tests/notrunning.pid' ), 'remove_pidfile_not_running: W/tmp/tests/notrunning.pid => 1' ) ; is( $PROCESS_ID, string_to_file( $PROCESS_ID, 'W/tmp/tests/running.pid' ), 'remove_pidfile_not_running: prepa W/tmp/tests/running.pid' ) ; is( undef, remove_pidfile_not_running( 'W/tmp/tests/running.pid' ), 'remove_pidfile_not_running: W/tmp/tests/running.pid => undef' ) ; - + + note( 'Leaving tests_remove_pidfile_not_running()' ) ; return ; } -sub remove_pidfile_not_running { - # - my $pid_filename = shift ; - - if ( ! $pid_filename ) { return } ; - if ( ! -e $pid_filename ) { return } ; - if ( ! 
-f $pid_filename ) { return } ;
-
+sub remove_pidfile_not_running
+{
+    #
+    my $pid_filename = shift @ARG ;
+
+    if ( ! $pid_filename ) { myprint( "No variable pid_filename\n" ) ; return } ;
+    if ( ! -e $pid_filename ) { myprint( "File $pid_filename does not exist\n" ) ; return } ;
+    if ( ! -f $pid_filename ) { myprint( "File $pid_filename is not a file\n" ) ; return } ;
+
     my $pid = firstline( $pid_filename ) ;
-    if ( ! match_a_pid_number( $pid ) ) { return } ;
+    if ( ! match_a_pid_number( $pid ) ) { myprint( "pid $pid in $pid_filename is not a number\n" ) ; return } ;
     # can't kill myself => do nothing
-    if ( ! kill 'ZERO', $PROCESS_ID ) { return } ;
-
-    # can't kill the pid => it is gone or own by another user => remove pidfile
+    if ( ! kill 'ZERO', $PROCESS_ID ) { myprint( "Cannot kill ZERO myself $PROCESS_ID\n" ) ; return } ;
+
+    # can't kill ZERO the pid => it is gone or owned by another user => remove pidfile
     if ( ! kill 'ZERO', $pid ) {
         myprint( "Removing old $pid_filename since its PID $pid is not running anymore (oo-killed?)\n" ) ;
         if ( unlink $pid_filename ) {
@@ -4803,110 +5785,234 @@ sub remove_pidfile_not_running {
     return ;
 }
-sub tests_write_pidfile {
+
+sub tests_tail
+{
+    note( 'Entering tests_tail()' ) ;
+
+    ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'tail: mkpath W/tmp/tests/' ) ;
+    ok( ( ! -e 'W/tmp/tests/tail.pid' || unlink 'W/tmp/tests/tail.pid' ), 'tail: unlink W/tmp/tests/tail.pid' ) ;
+    ok( ( ! -e 'W/tmp/tests/tail.txt' || unlink 'W/tmp/tests/tail.txt' ), 'tail: unlink W/tmp/tests/tail.txt' ) ;
+
+    is( undef, tail( ), 'tail: no args => undef' ) ;
     my $mysync ;
-
-    is( 1, write_pidfile( ), 'write_pidfile: no args => 1' ) ;
-
-    $mysync->{pidfile} = '/no/no/no.pid' ;
-    is( 1, write_pidfile( $mysync ), 'write_pidfile: no permission for /no/no/no.pid, no lock => 1' ) ;
-    $mysync->{pidfilelocking} = 1 ;
-    is( undef, write_pidfile( $mysync ), 'write_pidfile: no permission for /no/no/no.pid + lock => undef' ) ;
-
-    $mysync->{pidfile} = 'W/tmp/tests/test.pid' ;
-    ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'write_pidfile: mkpath W/tmp/tests/' ) ;
-    is( 1, touch( $mysync->{pidfile} ), 'write_pidfile: lock prepa' ) ;
-
-    $mysync->{pidfilelocking} = 0 ;
-    is( 1, write_pidfile( $mysync ), 'write_pidfile: W/tmp/tests/test.pid => 1' ) ;
-    is( $PROCESS_ID, firstline( 'W/tmp/tests/test.pid' ), "write_pidfile: W/tmp/tests/test.pid contains $PROCESS_ID" ) ;
+    is( undef, tail( $mysync ), 'tail: no pidfile => undef' ) ;
+
+    $mysync->{pidfile} = 'W/tmp/tests/tail.pid' ;
+    is( undef, tail( $mysync ), 'tail: no pidfilelocking => undef' ) ;
     $mysync->{pidfilelocking} = 1 ;
-    is( undef, write_pidfile( $mysync ), 'write_pidfile: W/tmp/tests/test.pid + lock => undef' ) ;
-
+    is( undef, tail( $mysync ), 'tail: pidfile does not exist => undef' ) ;
+
+
+    my $pidandlog = "33333\nW/tmp/tests/tail.txt\n" ;
+    is( $pidandlog, string_to_file( $pidandlog, $mysync->{pidfile} ), 'tail: put pid 33333 and tail.txt in pidfile' ) ;
+    is( undef, tail( $mysync ), 'tail: logfile to tail does not exist => undef' ) ;
+
+    my $tailcontent = "L1\nL2\nL3\nL4\nL5\n" ;
+    is( $tailcontent, string_to_file( $tailcontent, 'W/tmp/tests/tail.txt' ),
+    'tail: put L1\nL2\nL3\nL4\nL5\n in W/tmp/tests/tail.txt' ) ;
+
+    is( undef, tail( $mysync ), 'tail: fake pid in pidfile + tail off => undef' ) ;
+
+    $mysync->{ tail } = 1 ;
+    is( 1, tail( $mysync ), 'tail: fake pid in pidfile + tail on => 1' ) ;
+
+    # put my own pid, won't do tail
+    $pidandlog = "$PROCESS_ID\nW/tmp/tests/tail.txt\n" ;
+    is( $pidandlog, string_to_file( $pidandlog, $mysync->{pidfile} ), 'tail: put my own PID in pidfile' ) ;
+    is( undef, tail( $mysync ), 'tail: my own pid in pidfile => undef' ) ;
+
+    note( 'Leaving tests_tail()' ) ;
     return ;
 }
-sub write_pidfile {
+
+
+sub tail
+{
+    # return undef on failures
+    # return 1 on success
+
+    my $mysync = shift ;
+
+    # no tail when aborting!
+    if ( $mysync->{ abort } ) { return ; }
+
+    my $pidfile = $mysync->{pidfile} ;
+    my $lock = $mysync->{pidfilelocking} ;
+    my $tail = $mysync->{tail} ;
+
+    if ( ! $pidfile ) { return ; }
+    if ( ! $lock ) { return ; }
+    if ( ! $tail ) { return ; }
+
+    my $pidtotail = firstline( $pidfile ) ;
+    if ( ! $pidtotail ) { return ; }
+
+
+
+    # It should not happen but who knows...
+    if ( $pidtotail eq $PROCESS_ID ) { return ; }
+
+
+    my $filetotail = secondline( $pidfile ) ;
+    if ( ! $filetotail ) { return ; }
+
+    if ( ! -r $filetotail )
+    {
+        #myprint( "Error: can not read $filetotail\n" ) ;
+        return ;
+    }
+
+    myprint( "Doing a tail -f on $filetotail for process pid $pidtotail until it is finished.\n" ) ;
+    my $file = File::Tail->new(
+        name => $filetotail,
+        nowait => 1,
+        interval => 1,
+        tail => 1,
+        adjustafter => 2
+    );
+
+    my $moretimes = 200 ;
+    # print one line at least
+    my $line = $file->read ;
+    myprint( $line ) ;
+    while ( isrunning( $pidtotail, \$moretimes ) and defined( $line = $file->read ) )
+    {
+        myprint( $line );
+        sleep( 0.02 ) ;
+    }
+
+    return 1 ;
+}
+
+sub isrunning
+{
+    my $pidtocheck = shift ;
+    my $moretimes_ref = shift ;
+
+    if ( kill 'ZERO', $pidtocheck )
+    {
+        #myprint( "$pidtocheck running\n" ) ;
+        return 1 ;
+    }
+    elsif ( $$moretimes_ref >= 0 )
+    {
+        # continue to consider it running
+        $$moretimes_ref-- ;
+        return 1 ;
+    }
+    else
+    {
+        myprint( "Tailed process $pidtocheck ended\n" ) ;
+        return ;
+    }
+}
+
+sub tests_write_pidfile
+{
+    note( 'Entering tests_write_pidfile()' ) ;
+
+    my $mysync ;
+
+    is( 1, write_pidfile( ), 'write_pidfile: no args => 1' ) ;
+
+    # no pidfile => ok
+    $mysync->{pidfile} = q{} ;
+    is( 1, write_pidfile( $mysync ), 'write_pidfile: no pidfile => 1' ) ;
+
+    # The pidfile path is bad => failure
+    $mysync->{pidfile} = '/no/no/no.pid' ;
+    is( undef, write_pidfile( $mysync ), 'write_pidfile: no permission for /no/no/no.pid, no lock => undef' ) ;
+
+    $mysync->{pidfilelocking} = 1 ;
+    is( undef, write_pidfile( $mysync ), 'write_pidfile: no permission for /no/no/no.pid + lock => undef' ) ;
+
+    $mysync->{pidfile} = 'W/tmp/tests/test.pid' ;
+    ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'write_pidfile: mkpath W/tmp/tests/' ) ;
+    is( 1, touch( $mysync->{pidfile} ), 'write_pidfile: lock prepa' ) ;
+
+    $mysync->{pidfilelocking} = 0 ;
+    is( 1, write_pidfile( $mysync ), 'write_pidfile: W/tmp/tests/test.pid + no lock => 1' ) ;
+    is( $PROCESS_ID, firstline( 'W/tmp/tests/test.pid' ), "write_pidfile: W/tmp/tests/test.pid contains $PROCESS_ID" ) ;
+    is( q{}, secondline( 'W/tmp/tests/test.pid' ), "write_pidfile: W/tmp/tests/test.pid contains no second line" ) ;
+
+    $mysync->{pidfilelocking} = 1 ;
+    is( undef, write_pidfile( $mysync ), 'write_pidfile: W/tmp/tests/test.pid + lock => undef' ) ;
+
+
+    $mysync->{pidfilelocking} = 0 ;
+    $mysync->{ logfile } = 'rrrr.txt' ;
+    is( 1, write_pidfile( $mysync ), 'write_pidfile: W/tmp/tests/test.pid + no lock + logfile => 1' ) ;
+    is( $PROCESS_ID, firstline( 'W/tmp/tests/test.pid' ), "write_pidfile: + no lock + logfile W/tmp/tests/test.pid contains $PROCESS_ID" ) ;
+    is( q{rrrr.txt}, secondline( 'W/tmp/tests/test.pid' ), "write_pidfile: + no lock + logfile W/tmp/tests/test.pid 
contains rrrr.txt" ) ; + + + note( 'Leaving tests_write_pidfile()' ) ; + return ; +} + + + +sub write_pidfile +{ # returns undef if something is considered fatal # returns 1 otherwise - - if ( ! @ARG ) { return 1 ; } - - my $mysync = shift ; - - # Do not write the pid file if this process goal is to abort the process designed by the pid file - if ( $mysync->{abort} ) { return 1 ; } - - # - my $pid_filename = $mysync->{pidfile} ; - my $lock = $mysync->{pidfilelocking} ; - myprint( "PID file is $pid_filename ( to change it use --pidfile filepath ; to avoid it use --pidfile \"\" )\n" ) ; + if ( ! @ARG ) { return 1 ; } + + my $mysync = shift @ARG ; + + # Do not write the pid file if this process goal is to abort the process designed by the pid file + if ( $mysync->{abort} ) { return 1 ; } + + # + my $pid_filename = $mysync->{ pidfile } ; + my $lock = $mysync->{ pidfilelocking } ; + + if ( ! $pid_filename ) + { + myprint( "PID file is unset ( to set it, use --pidfile filepath ; to avoid it use --pidfile \"\" )\n" ) ; + return( 1 ) ; + } + + myprint( "PID file is $pid_filename ( to change it, use --pidfile filepath ; to avoid it use --pidfile \"\" )\n" ) ; if ( -e $pid_filename and $lock ) { myprint( "$pid_filename already exists, another imapsync may be curently running. Aborting imapsync.\n" ) ; return ; - + } + if ( -e $pid_filename ) { myprint( "$pid_filename already exists, overwriting it ( use --pidfilelocking to avoid concurrent runs )\n" ) ; } + my $pid_string = "$PROCESS_ID\n" ; + my $pid_message = "Writing my PID $PROCESS_ID in $pid_filename\n" ; + + if ( $mysync->{ logfile } ) + { + $pid_string .= "$mysync->{ logfile }\n" ; + $pid_message .= "Writing also my logfile name in $pid_filename : $mysync->{ logfile }\n" ; + } + if ( open my $FILE_HANDLE, '>', $pid_filename ) { - myprint( "Writing my PID $PROCESS_ID in $pid_filename\n" ) ; - print $FILE_HANDLE $PROCESS_ID ; + myprint( $pid_message ) ; + print $FILE_HANDLE $pid_string ; close $FILE_HANDLE ; return( 1 ) ; - } else { - myprint( "Could not open $pid_filename for writing. Check permissions or disk space.\n" ) ; - if ( $lock ) { - return ; - }else{ - return( 1 ) ; - } + } + else + { + myprint( "Could not open $pid_filename for writing. Check permissions or disk space: $OS_ERROR\n" ) ; + return ; } } - -sub remove_tmp_files { - my $mysync = shift or return ; - $mysync->{pidfile} or return ; - if ( -e $mysync->{pidfile} ) { - unlink $mysync->{pidfile} ; - } - return ; -} - - -sub exit_clean { - my $mysync = shift ; - my $status = shift ; - $status = defined $status ? 
$status : $EXIT_UNKNOWN ; - remove_tmp_files( $mysync ) ; - myprint( "Exiting with return value $status\n" ) ; - if ( $mysync->{log} ) { - myprint( "Log file is $mysync->{logfile} ( to change it, use --logfile filepath ; or use --nolog to turn off logging )\n" ) ; - close $mysync->{logfile_handle} ; - } - exit $status ; -} - -sub die_clean { - my @messages = @_ ; - remove_tmp_files( $sync ) ; - myprint( @messages ) ; - exit 255 ; -} - -sub missing_option { - my ( $option ) = @_ ; - die_clean( "$option option is mandatory, for help run $PROGRAM_NAME --help\n" ) ; - return ; -} - - -sub fix_Inbox_INBOX_mapping { +sub fix_Inbox_INBOX_mapping +{ my( $h1_all, $h2_all ) = @_ ; my $regex = q{} ; @@ -4919,7 +6025,8 @@ sub fix_Inbox_INBOX_mapping { return( $regex ) ; } -sub tests_fix_Inbox_INBOX_mapping { +sub tests_fix_Inbox_INBOX_mapping +{ note( 'Entering tests_fix_Inbox_INBOX_mapping()' ) ; @@ -4954,7 +6061,8 @@ sub tests_fix_Inbox_INBOX_mapping { } -sub jux_utf8_list { +sub jux_utf8_list +{ my @s_inp = @_ ; my $s_out = q{} ; foreach my $s ( @s_inp ) { @@ -4963,19 +6071,21 @@ sub jux_utf8_list { return( $s_out ) ; } -sub tests_jux_utf8_list { +sub tests_jux_utf8_list +{ note( 'Entering tests_jux_utf8_list()' ) ; ok( q{} eq jux_utf8_list( ), 'jux_utf8_list: void' ) ; ok( "[]\n" eq jux_utf8_list( q{} ), 'jux_utf8_list: empty string' ) ; ok( "[INBOX]\n" eq jux_utf8_list( 'INBOX' ), 'jux_utf8_list: INBOX' ) ; - ok( "[&ANY-] = [Ö]\n" eq jux_utf8_list( '&ANY-' ), 'jux_utf8_list: &ANY-' ) ; + ok( "[&ANY-] = [Ö]\n" eq jux_utf8_list( '&ANY-' ), 'jux_utf8_list: &ANY-' ) ; note( 'Leaving tests_jux_utf8_list()' ) ; return( 0 ) ; } -sub jux_utf8 { +sub jux_utf8 +{ # juxtapose utf8 at the right if different my ( $s_utf7 ) = shift ; my ( $s_utf8 ) = imap_utf7_decode( $s_utf7 ) ; @@ -4990,15 +6100,16 @@ sub jux_utf8 { } # editing utf8 can be tricky without an utf8 editor -sub tests_jux_utf8 { +sub tests_jux_utf8 +{ note( 'Entering tests_jux_utf8()' ) ; ok( '[INBOX]' eq jux_utf8( 'INBOX'), 'jux_utf8: INBOX => [INBOX]' ) ; - ok( '[&ZTZO9nux-] = [收件箱]' eq jux_utf8( '&ZTZO9nux-'), 'jux_utf8: => [&ZTZO9nux-] = [收件箱]' ) ; - ok( '[&ANY-] = [Ö]' eq jux_utf8( '&ANY-'), 'jux_utf8: &ANY- => [&ANY-] = [Ö]' ) ; + ok( '[&ZTZO9nux-] = [收件箱]' eq jux_utf8( '&ZTZO9nux-'), 'jux_utf8: => [&ZTZO9nux-] = [收件箱]' ) ; + ok( '[&ANY-] = [Ö]' eq jux_utf8( '&ANY-'), 'jux_utf8: &ANY- => [&ANY-] = [Ö]' ) ; ok( '[]' eq jux_utf8( q{} ), 'jux_utf8: void => []' ) ; - ok( '[+BD8EQAQ1BDQEOwQ+BDM-] = [предлог]' eq jux_utf8( '+BD8EQAQ1BDQEOwQ+BDM-' ), 'jux_utf8: => [+BD8EQAQ1BDQEOwQ+BDM-] = [предлог]' ) ; - ok( '[&BB8EQAQ+BDUEOgRC-] = [Проект]' eq jux_utf8( '&BB8EQAQ+BDUEOgRC-' ), 'jux_utf8: => [&BB8EQAQ+BDUEOgRC-] = [Проект]' ) ; + ok( '[+BD8EQAQ1BDQEOwQ+BDM-] = [предлог]' eq jux_utf8( '+BD8EQAQ1BDQEOwQ+BDM-' ), 'jux_utf8: => [+BD8EQAQ1BDQEOwQ+BDM-] = [предлог]' ) ; + ok( '[&BB8EQAQ+BDUEOgRC-] = [Проект]' eq jux_utf8( '&BB8EQAQ+BDUEOgRC-' ), 'jux_utf8: => [&BB8EQAQ+BDUEOgRC-] = [Проект]' ) ; note( 'Leaving tests_jux_utf8()' ) ; return ; @@ -5007,7 +6118,8 @@ sub tests_jux_utf8 { # Copied from http://cpansearch.perl.org/src/FABPOT/Unicode-IMAPUtf7-2.01/lib/Unicode/IMAPUtf7.pm # and then fixed with # https://rt.cpan.org/Public/Bug/Display.html?id=11172 -sub imap_utf7_decode { +sub imap_utf7_decode +{ my ( $s ) = shift ; # Algorithm @@ -5020,7 +6132,8 @@ sub imap_utf7_decode { return( Unicode::String::utf7( $s )->utf8 ) ; } -sub imap_utf7_encode { +sub imap_utf7_encode +{ my ( $s ) = @_ ; $s = Unicode::String::utf8( $s )->utf7 ; @@ -5034,13 +6147,14 
@@ sub imap_utf7_encode { -sub select_folder { - my ( $imap, $folder, $hostside ) = @_ ; +sub select_folder +{ + my ( $mysync, $imap, $folder, $hostside ) = @_ ; if ( ! $imap->select( $folder ) ) { my $error = join q{}, "$hostside folder $folder: Could not select: ", $imap->LastError, "\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; return( 0 ) ; }else{ # ok select succeeded @@ -5048,13 +6162,14 @@ sub select_folder { } } -sub examine_folder { - my ( $imap, $folder, $hostside ) = @_ ; +sub examine_folder +{ + my ( $mysync, $imap, $folder, $hostside ) = @_ ; if ( ! $imap->examine( $folder ) ) { my $error = join q{}, "$hostside folder $folder: Could not examine: ", $imap->LastError, "\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; return( 0 ) ; }else{ # ok select succeeded @@ -5063,10 +6178,9 @@ sub examine_folder { } - - -sub count_from_select { - my @lines = @_ ; +sub count_from_select +{ + my @lines = @ARG ; my $count ; foreach my $line ( @lines ) { #myprint( "line = [$line]\n" ) ; @@ -5080,23 +6194,10 @@ sub count_from_select { - - - - - - - - - - - - - - - -sub create_folder_old { - my( $imap, $h2_fold, $h1_fold ) = @_ ; +sub create_folder_old +{ + my $mysync = shift @ARG ; + my( $imap, $h2_fold, $h1_fold ) = @ARG ; myprint( "Creating (old way) folder [$h2_fold] on host2\n" ) ; if ( ( 'INBOX' eq uc $h2_fold ) @@ -5104,12 +6205,12 @@ sub create_folder_old { myprint( "Folder [$h2_fold] already exists\n" ) ; return( 1 ) ; } - if ( ! $sync->{dry} ){ + if ( ! $mysync->{dry} ){ if ( ! $imap->create( $h2_fold ) ) { my $error = join q{}, "Could not create folder [$h2_fold] from [$h1_fold]: ", $imap->LastError( ), "\n" ; - errors_incr( $sync, $error ) ; + errors_incr( $mysync, $error ) ; # success if folder exists ("already exists" error) return( 1 ) if $imap->exists( $h2_fold ) ; # failure since create failed @@ -5121,58 +6222,60 @@ sub create_folder_old { } }else{ # dry mode, no folder so many imap will fail, assuming failure - myprint( "Created ( the old way ) folder [$h2_fold] on host2 $sync->{dry_message}\n" ) ; + myprint( "Created ( the old way ) folder [$h2_fold] on host2 $mysync->{dry_message}\n" ) ; return( 0 ) ; } } -sub create_folder { - my( $imap2 , $h2_fold , $h1_fold ) = @_ ; +sub create_folder +{ + my $mysync = shift @ARG ; + my( $myimap2 , $h2_fold , $h1_fold ) = @ARG ; my( @parts , $parent ) ; - if ( $imap2->IsUnconnected( ) ) { + if ( $myimap2->IsUnconnected( ) ) { myprint( "Host2: Unconnected state\n" ) ; return( 0 ) ; } if ( $create_folder_old ) { - return( create_folder_old( $imap2 , $h2_fold , $h1_fold ) ) ; + return( create_folder_old( $mysync, $myimap2 , $h2_fold , $h1_fold ) ) ; } myprint( "Creating folder [$h2_fold] on host2\n" ) ; if ( ( 'INBOX' eq uc $h2_fold ) - and ( $imap2->exists( $h2_fold ) ) ) { + and ( $myimap2->exists( $h2_fold ) ) ) { myprint( "Folder [$h2_fold] already exists\n" ) ; return( 1 ) ; } - if ( $mixfolders and $imap2->exists( $h2_fold ) ) { - myprint( "Folder [$h2_fold] already exists (--nomixfolders is not set)\n" ) ; + if ( $mixfolders and $myimap2->exists( $h2_fold ) ) { + myprint( "Folder [$h2_fold] already exists (--nomixfolders is not set)\n" ) ; return( 1 ) ; } - if ( ( not $mixfolders ) and ( $imap2->exists( $h2_fold ) ) ) { + if ( ( not $mixfolders ) and ( $myimap2->exists( $h2_fold ) ) ) { myprint( "Folder [$h2_fold] already exists and --nomixfolders is set\n" ) ; return( 0 ) ; } - @parts = split /\Q$h2_sep\E/x, $h2_fold ; + @parts = split /\Q$mysync->{ h2_sep }\E/x, $h2_fold ; pop 
@parts ; - $parent = join $h2_sep, @parts ; + $parent = join $mysync->{ h2_sep }, @parts ; $parent =~ s/^\s+|\s+$//xg ; - if ( ( $parent ne q{} ) and ( ! $imap2->exists( $parent ) ) ) { - create_folder( $imap2 , $parent , $h1_fold ) ; + if ( ( $parent ne q{} ) and ( ! $myimap2->exists( $parent ) ) ) { + create_folder( $mysync, $myimap2 , $parent , $h1_fold ) ; } - if ( ! $sync->{dry} ) { - if ( ! $imap2->create( $h2_fold ) ) { + if ( ! $mysync->{dry} ) { + if ( ! $myimap2->create( $h2_fold ) ) { my $error = join q{}, "Could not create folder [$h2_fold] from [$h1_fold]: " , - $imap2->LastError( ), "\n" ; - errors_incr( $sync, $error ) ; + $myimap2->LastError( ), "\n" ; + errors_incr( $mysync, $error ) ; # success if folder exists ("already exists" error) - return( 1 ) if $imap2->exists( $h2_fold ) ; + return( 1 ) if $myimap2->exists( $h2_fold ) ; # failure since create failed return( 0 ) ; }else{ @@ -5182,8 +6285,8 @@ sub create_folder { } }else{ # dry mode, no folder so many imap will fail, assuming failure - myprint( "Created folder [$h2_fold] on host2 $sync->{dry_message}\n" ) ; - if ( ! $justfolders ) { + myprint( "Created folder [$h2_fold] on host2 $mysync->{dry_message}\n" ) ; + if ( ! $mysync->{ justfolders } ) { myprint( "Since --dry mode is on and folder [$h2_fold] on host2 does not exist yet, syncing messages will not be simulated.\n" . "To simulate message syncing, use --justfolders without --dry to first create the missing folders then rerun the --dry sync.\n" ) ; } @@ -5193,14 +6296,16 @@ sub create_folder { -sub tests_folder_routines { +sub tests_folder_routines +{ note( 'Entering tests_folder_routines()' ) ; ok( !is_requested_folder('folder_foo'), 'is_requested_folder folder_foo 1' ); ok( add_to_requested_folders('folder_foo'), 'add_to_requested_folders folder_foo' ); ok( is_requested_folder('folder_foo'), 'is_requested_folder folder_foo 2' ); ok( !is_requested_folder('folder_NO_EXIST'), 'is_requested_folder folder_NO_EXIST' ); - ok( !remove_from_requested_folders('folder_foo'), 'removed folder_foo' ); + + is_deeply( [ 'folder_foo' ], [ remove_from_requested_folders( 'folder_foo' ) ], 'removed folder_foo => folder_foo' ) ; ok( !is_requested_folder('folder_foo'), 'is_requested_folder folder_foo 3' ); my @f ; ok( @f = add_to_requested_folders('folder_bar', 'folder_toto'), "add result: @f" ); @@ -5208,53 +6313,69 @@ sub tests_folder_routines { ok( is_requested_folder('folder_toto'), 'is_requested_folder 5' ); ok( remove_from_requested_folders('folder_toto'), 'remove_from_requested_folders: ' ); ok( !is_requested_folder('folder_toto'), 'is_requested_folder 6' ); - ok( !remove_from_requested_folders('folder_bar'), 'remove_from_requested_folders: empty' ) ; + + is_deeply( [ 'folder_bar' ], [ remove_from_requested_folders('folder_bar') ], 'remove_from_requested_folders: empty' ) ; ok( 0 == compare_lists( [ sort_requested_folders( ) ], [] ), 'sort_requested_folders: all empty' ) ; - ok( add_to_requested_folders('M_55'), 'add_to_requested_folders M_55' ); - ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'M_55' ] ), 'sort_requested_folders: middle' ) ; + ok( add_to_requested_folders( 'A_99', 'M_55', 'Z_11' ), 'add_to_requested_folders M_55 Z_11' ); + ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'A_99', 'M_55', 'Z_11' ] ), 'sort_requested_folders: middle' ) ; + + @folderfirst = ( 'Z_11' ) ; - ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'Z_11', 'M_55' ] ), 'sort_requested_folders: first+middle' ) ; + + ok( 0 == compare_lists( [ sort_requested_folders( ) 
], [ 'Z_11', 'A_99', 'M_55' ] ), 'sort_requested_folders: first+middle' ) ; + + is_deeply( [ 'Z_11', 'A_99', 'M_55' ], [ sort_requested_folders( ) ], 'sort_requested_folders: first+middle is_deeply' ) ; + @folderlast = ( 'A_99' ) ; ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'Z_11', 'M_55', 'A_99' ] ), 'sort_requested_folders: first+middle+last 1' ) ; - ok( add_to_requested_folders('M_55', 'M_44',), 'add_to_requested_folders M_55 M_44' ); - ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'Z_11', 'M_44', 'M_55', 'A_99' ] ), 'sort_requested_folders: first+middle+last 2' ) ; + ok( add_to_requested_folders('M_55', 'M_44',), 'add_to_requested_folders M_55 M_44' ) ; + + ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'Z_11', 'M_44', 'M_55', 'A_99'] ), 'sort_requested_folders: first+middle+last 2' ) ; + + + ok( add_to_requested_folders('A_88', 'Z_22',), 'add_to_requested_folders A_88 Z_22' ) ; @folderfirst = qw( Z_22 Z_11 ) ; @folderlast = qw( A_99 A_88 ) ; ok( 0 == compare_lists( [ sort_requested_folders( ) ], [ 'Z_22', 'Z_11', 'M_44', 'M_55', 'A_99', 'A_88' ] ), 'sort_requested_folders: first+middle+last 3' ) ; + undef @folderfirst ; + undef @folderlast ; note( 'Leaving tests_folder_routines()' ) ; return ; } -sub sort_requested_folders { +sub sort_requested_folders +{ my @requested_folders_sorted = () ; - foreach my $folder ( @folderfirst ) { - remove_from_requested_folders( $folder ) ; - } + #myprint "folderfirst: @folderfirst\n" ; + my @folderfirst_requested = remove_from_requested_folders( @folderfirst ) ; + #myprint "folderfirst_requested: @folderfirst_requested\n" ; - foreach my $folder ( @folderlast ) { - remove_from_requested_folders( $folder ) ; - } + my @folderlast_requested = remove_from_requested_folders( @folderlast ) ; my @middle = sort keys %requested_folder ; - @requested_folders_sorted = ( @folderfirst, @middle, @folderlast ) ; + @requested_folders_sorted = ( @folderfirst_requested, @middle, @folderlast_requested ) ; + #myprint "requested_folders_sorted: @requested_folders_sorted\n" ; + add_to_requested_folders( @requested_folders_sorted ) ; return( @requested_folders_sorted ) ; } -sub is_requested_folder { +sub is_requested_folder +{ my ( $folder ) = @_; - return( defined $requested_folder{ $folder } ) ; + return( defined $requested_folder{ $folder } ) ; } -sub add_to_requested_folders { +sub add_to_requested_folders +{ my @wanted_folders = @_ ; foreach my $folder ( @wanted_folders ) { @@ -5263,16 +6384,65 @@ sub add_to_requested_folders { return( keys %requested_folder ) ; } -sub remove_from_requested_folders { - my @wanted_folders = @_ ; +sub tests_remove_from_requested_folders +{ + note( 'Entering tests_remove_from_requested_folders()' ) ; - foreach my $folder ( @wanted_folders ) { - delete $requested_folder{ $folder } ; - } - return( keys %requested_folder ) ; + is( undef, undef, 'remove_from_requested_folders: undef is undef' ) ; + is_deeply( [], [ remove_from_requested_folders( ) ], 'remove_from_requested_folders: no args' ) ; + %requested_folder = ( + 'F1' => 1, + ) ; + is_deeply( [], [ remove_from_requested_folders( ) ], 'remove_from_requested_folders: remove nothing among F1 => nothing' ) ; + is_deeply( [], [ remove_from_requested_folders( 'Fno' ) ], 'remove_from_requested_folders: remove Fno among F1 => nothing' ) ; + is_deeply( [ 'F1' ], [ remove_from_requested_folders( 'F1' ) ], 'remove_from_requested_folders: remove F1 among F1 => F1' ) ; + is_deeply( { }, { %requested_folder }, 'remove_from_requested_folders: remove F1 among F1 => 
%requested_folder emptied' ) ; + + %requested_folder = ( + 'F1' => 1, + 'F2' => 1, + ) ; + is_deeply( [], [ remove_from_requested_folders( ) ], 'remove_from_requested_folders: remove nothing among F1 F2 => nothing' ) ; + is_deeply( [], [ remove_from_requested_folders( 'Fno' ) ], 'remove_from_requested_folders: remove Fno among F1 F2 => nothing' ) ; + is_deeply( [ 'F1' ], [ remove_from_requested_folders( 'F1' ) ], 'remove_from_requested_folders: remove F1 among F1 F2 => F1' ) ; + is_deeply( { 'F2' => 1 }, { %requested_folder }, 'remove_from_requested_folders: remove F1 among F1 F2 => %requested_folder F2' ) ; + + is_deeply( [], [ remove_from_requested_folders( 'F1' ) ], 'remove_from_requested_folders: remove F1 among F2 => nothing' ) ; + is_deeply( [ 'F2' ], [ remove_from_requested_folders( 'F1', 'F2' ) ], 'remove_from_requested_folders: remove F1 F2 among F2 => F2' ) ; + is_deeply( {}, { %requested_folder }, 'remove_from_requested_folders: remove F1 among F1 F2 => %requested_folder F2' ) ; + + %requested_folder = ( + 'F1' => 1, + 'F2' => 1, + 'F3' => 1, + ) ; + is_deeply( [ 'F1', 'F2' ], [ remove_from_requested_folders( 'F1', 'F2' ) ], 'remove_from_requested_folders: remove F1 F2 among F1 F2 F3 => F1 F2' ) ; + is_deeply( { 'F3' => 1 }, { %requested_folder }, 'remove_from_requested_folders: remove F1 F2 among F1 F2 F3 => %requested_folder F3' ) ; + + + + note( 'Leaving tests_remove_from_requested_folders()' ) ; + return ; } -sub compare_lists { + +sub remove_from_requested_folders +{ + my @unwanted_folders = @_ ; + + my @removed_folders = () ; + foreach my $folder ( @unwanted_folders ) { + if ( exists $requested_folder{ $folder } ) + { + delete $requested_folder{ $folder } ; + push @removed_folders, $folder ; + } + } + return( @removed_folders ) ; +} + +sub compare_lists +{ my ($list_1_ref, $list_2_ref) = @_; return($MINUS_ONE) if ((not defined $list_1_ref) and defined $list_2_ref); @@ -5306,7 +6476,8 @@ sub compare_lists { return 0 ; } -sub tests_compare_lists { +sub tests_compare_lists +{ note( 'Entering tests_compare_lists()' ) ; my $empty_list_ref = []; @@ -5359,7 +6530,8 @@ sub tests_compare_lists { } -sub guess_prefix { +sub guess_prefix +{ my @foldernames = @_ ; my $prefix_guessed = q{} ; @@ -5376,7 +6548,8 @@ sub guess_prefix { return( $prefix_guessed ) ; } -sub tests_guess_prefix { +sub tests_guess_prefix +{ note( 'Entering tests_guess_prefix()' ) ; is( guess_prefix( ), q{}, 'guess_prefix: no args => empty string' ) ; @@ -5396,14 +6569,15 @@ sub tests_guess_prefix { return ; } -sub get_prefix { +sub get_prefix +{ my( $imap, $prefix_in, $prefix_opt, $Side, $folders_ref ) = @_ ; my( $prefix_out, $prefix_guessed ) ; - ( $debug or $sync->{debugfolders} ) and myprint( "$Side: Getting prefix\n" ) ; + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "$Side: Getting prefix\n" ) ; $prefix_guessed = guess_prefix( @{ $folders_ref } ) ; myprint( "$Side: guessing prefix from folder listing: [$prefix_guessed]\n" ) ; - ( $debug or $sync->{debugfolders} ) and myprint( "$Side: Calling namespace capability\n" ) ; + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "$Side: Calling namespace capability\n" ) ; if ( $imap->has_capability( 'namespace' ) ) { my $r_namespace = $imap->namespace( ) ; $prefix_out = $r_namespace->[0][0][0] ; @@ -5433,7 +6607,8 @@ sub get_prefix { } -sub guess_separator { +sub guess_separator +{ my @foldernames = @_ ; #return( undef ) unless ( @foldernames ) ; @@ -5447,12 +6622,13 @@ sub guess_separator { $counter{'\\'}++ while ( $folder =~ 
m{[^\\](\\){1}(?=[^\\])}xg ) ; # count \ } my @race_sorted = sort { $counter{ $b } <=> $counter{ $a } } keys %counter ; - $debug and myprint( "@foldernames\n@race_sorted\n", %counter, "\n" ) ; + $sync->{ debug } and myprint( "@foldernames\n@race_sorted\n", %counter, "\n" ) ; $sep_guessed = shift @race_sorted || $LAST_RESSORT_SEPARATOR ; # / when nothing found. return( $sep_guessed ) ; } -sub tests_guess_separator { +sub tests_guess_separator +{ note( 'Entering tests_guess_separator()' ) ; ok( '/' eq guess_separator( ), 'guess_separator: no args' ) ; @@ -5468,16 +6644,18 @@ sub tests_guess_separator { return ; } -sub get_separator { +sub get_separator +{ my( $imap, $sep_in, $sep_opt, $Side, $folders_ref ) = @_ ; my( $sep_out, $sep_guessed ) ; - ( $debug or $sync->{debugfolders} ) and myprint( "$Side: Getting separator\n" ) ; + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "$Side: Getting separator\n" ) ; $sep_guessed = guess_separator( @{ $folders_ref } ) ; myprint( "$Side: guessing separator from folder listing: [$sep_guessed]\n" ) ; - ( $debug or $sync->{debugfolders} ) and myprint( "$Side: calling namespace capability\n" ) ; - if ( $imap->has_capability( 'namespace' ) ) { + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "$Side: calling namespace capability\n" ) ; + if ( $imap->has_capability( 'namespace' ) ) + { $sep_out = $imap->separator( ) ; if ( defined $sep_out ) { myprint( "$Side: separator given by NAMESPACE: [$sep_out]\n" ) ; @@ -5501,7 +6679,8 @@ sub get_separator { } } } - else{ + else + { if ( defined $sep_in ) { myprint( "$Side: No NAMESPACE capability but using [$sep_in] given by $sep_opt\n" ) ; $sep_out = $sep_in ; @@ -5516,7 +6695,8 @@ sub get_separator { return ; } -sub help_to_guess_sep { +sub help_to_guess_sep +{ my( $imap, $sep_opt ) = @_ ; my $help_to_guess_sep = "You can set the separator character with the $sep_opt option,\n" @@ -5526,7 +6706,8 @@ sub help_to_guess_sep { return( $help_to_guess_sep ) ; } -sub help_to_guess_prefix { +sub help_to_guess_prefix +{ my( $imap, $prefix_opt ) = @_ ; my $help_to_guess_prefix = "You can set the prefix namespace with the $prefix_opt option,\n" @@ -5537,7 +6718,8 @@ sub help_to_guess_prefix { } -sub folders_list_to_help { +sub folders_list_to_help +{ my( $imap ) = shift ; my @folders = $imap->folders ; @@ -5545,122 +6727,462 @@ sub folders_list_to_help { return( $listing ) ; } +sub private_folders_separators_and_prefixes +{ +# what are the private folders separators and prefixes for each server ? 
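+# Each side is resolved on its own: get_separator() and get_prefix() use the IMAP
+# NAMESPACE capability when the server offers it, use the values forced with
+# --sep1/--sep2 and --prefix1/--prefix2 when given, and otherwise fall back to a
+# guess made from the folder listing. The results are stored in $sync->{ h1_sep },
+# $sync->{ h2_sep }, $sync->{ h1_prefix } and $sync->{ h2_prefix }, then printed.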
+ + ( $sync->{ debug } or $sync->{debugfolders} ) and myprint( "Getting separators\n" ) ; + $sync->{ h1_sep } = get_separator( $sync->{imap1}, $sync->{ sep1 }, '--sep1', 'Host1', \@h1_folders_all ) ; + $sync->{ h2_sep } = get_separator( $sync->{imap2}, $sync->{ sep2 }, '--sep2', 'Host2', \@h2_folders_all ) ; -sub tests_imap2_folder_name { - note( 'Entering tests_imap2_folder_name()' ) ; + $sync->{ h1_prefix } = get_prefix( $sync->{imap1}, $prefix1, '--prefix1', 'Host1', \@h1_folders_all ) ; + $sync->{ h2_prefix } = get_prefix( $sync->{imap2}, $prefix2, '--prefix2', 'Host2', \@h2_folders_all ) ; -$sync->{ h1_prefix } = $sync->{ h2_prefix } = q{} ; -$h1_sep = '/'; -$h2_sep = '.'; - -$debug and myprint( <<"EOS" -prefix1: [$sync->{ h1_prefix }] -prefix2: [$sync->{ h2_prefix }] -sep1:[$h1_sep] -sep2:[$h2_sep] -EOS -) ; - -$fixslash2 = 0 ; -ok(q{} eq imap2_folder_name(q{}), 'imap2_folder_name: empty string'); -ok('blabla' eq imap2_folder_name('blabla'), 'imap2_folder_name: blabla'); -ok('spam.spam' eq imap2_folder_name('spam/spam'), 'imap2_folder_name: spam/spam'); -ok('spam/spam' eq imap2_folder_name('spam.spam'), 'imap2_folder_name: spam.spam'); -ok('spam.spam/spam' eq imap2_folder_name('spam/spam.spam'), 'imap2_folder_name: spam/spam.spam'); -ok('s pam.spam/sp am' eq imap2_folder_name('s pam/spam.sp am'), 'imap2_folder_name: s pam/spam.sp am'); - -$sync->{f1f2h}{ 'auto' } = 'moto' ; -ok( 'moto' eq imap2_folder_name( 'auto' ), 'imap2_folder_name: auto' ) ; -$sync->{f1f2h}{ 'auto/auto' } = 'moto x 2' ; -ok( 'moto x 2' eq imap2_folder_name( 'auto/auto' ), 'imap2_folder_name: auto/auto' ) ; - -@regextrans2 = ('s,/,X,g'); -ok(q{} eq imap2_folder_name(q{}), 'imap2_folder_name: empty string [s,/,X,g]'); -ok('blabla' eq imap2_folder_name('blabla'), 'imap2_folder_name: blabla [s,/,X,g]'); -ok('spam.spam' eq imap2_folder_name('spam/spam'), 'imap2_folder_name: spam/spam [s,/,X,g]'); -ok('spamXspam' eq imap2_folder_name('spam.spam'), 'imap2_folder_name: spam.spam [s,/,X,g]'); -ok('spam.spamXspam' eq imap2_folder_name('spam/spam.spam'), 'imap2_folder_name: spam/spam.spam [s,/,X,g]'); - -@regextrans2 = ( 's, ,_,g' ) ; -ok('blabla' eq imap2_folder_name('blabla'), 'imap2_folder_name: blabla [s, ,_,g]'); -ok('bla_bla' eq imap2_folder_name('bla bla'), 'imap2_folder_name: blabla [s, ,_,g]'); - -@regextrans2 = ( q{s,(.*),\U$1,} ) ; -ok( 'BLABLA' eq imap2_folder_name( 'blabla' ), q{imap2_folder_name: blabla [s,\U(.*)\E,$1,]} ) ; - -$fixslash2 = 1 ; -@regextrans2 = ( ) ; -ok(q{} eq imap2_folder_name(q{}), 'imap2_folder_name: empty string'); -ok('blabla' eq imap2_folder_name('blabla'), 'imap2_folder_name: blabla'); -ok('spam.spam' eq imap2_folder_name('spam/spam'), 'imap2_folder_name: spam/spam -> spam.spam'); -ok('spam_spam' eq imap2_folder_name('spam.spam'), 'imap2_folder_name: spam.spam -> spam_spam'); -ok('spam.spam_spam' eq imap2_folder_name('spam/spam.spam'), 'imap2_folder_name: spam/spam.spam -> spam.spam_spam'); -ok('s pam.spam_spa m' eq imap2_folder_name('s pam/spam.spa m'), 'imap2_folder_name: s pam/spam.spa m -> s pam.spam_spa m'); - -$h1_sep = '.'; -$h2_sep = '/'; -ok(q{} eq imap2_folder_name(q{}), 'imap2_folder_name: empty string'); -ok('blabla' eq imap2_folder_name('blabla'), 'imap2_folder_name: blabla'); -ok('spam.spam' eq imap2_folder_name('spam/spam'), 'imap2_folder_name: spam/spam -> spam.spam'); -ok('spam/spam' eq imap2_folder_name('spam.spam'), 'imap2_folder_name: spam.spam -> spam/spam'); -ok('spam.spam/spam' eq imap2_folder_name('spam/spam.spam'), 'imap2_folder_name: spam/spam.spam -> 
spam.spam/spam'); + myprint( "Host1: separator and prefix: [$sync->{ h1_sep }][$sync->{ h1_prefix }]\n" ) ; + myprint( "Host2: separator and prefix: [$sync->{ h2_sep }][$sync->{ h2_prefix }]\n" ) ; + return ; +} +sub subfolder1 +{ + my $mysync = shift ; + my $subfolder1 = sanitize_subfolder( $mysync->{ subfolder1 } ) ; -$fixslash2 = 0 ; -$sync->{ h1_prefix } = q{ }; + if ( $subfolder1 ) + { + # turns off automap + myprint( "Turning off automapping folders because of --subfolder1\n" ) ; + $mysync->{ automap } = undef ; + myprint( "Sanitizing subfolder1: [$mysync->{ subfolder1 }] => [$subfolder1]\n" ) ; + $mysync->{ subfolder1 } = $subfolder1 ; + add_subfolder1_to_folderrec( $mysync ) || exit_clean( $mysync, $EXIT_SUBFOLDER1_NO_EXISTS ) ; + } + else + { + $mysync->{ subfolder1 } = undef ; + } +} -ok('spam.spam/spam' eq imap2_folder_name('spam/spam.spam'), 'imap2_folder_name: spam/spam.spam -> spam.spam/spam'); -ok('spam.spam/spam' eq imap2_folder_name(' spam/spam.spam'), 'imap2_folder_name: spam/spam.spam -> spam.spam/spam'); +sub subfolder2 +{ + my $mysync = shift ; + my $subfolder2 = sanitize_subfolder( $mysync->{ subfolder2 } ) ; + if ( $subfolder2 ) + { + # turns off automap + myprint( "Turning off automapping folders because of --subfolder2\n" ) ; + $mysync->{ automap } = undef ; + myprint( "Sanitizing subfolder2: [$mysync->{ subfolder2 }] => [$subfolder2]\n" ) ; + $mysync->{ subfolder2 } = $subfolder2 ; + set_regextrans2_for_subfolder2( $mysync ) ; + } + else + { + $mysync->{ subfolder2 } = undef ; + } -$h1_sep = '.' ; -$h2_sep = '/' ; -$sync->{ h1_prefix } = 'INBOX.' ; -$sync->{ h2_prefix } = q{} ; -@regextrans2 = ( q{s,(.*),\U$1,} ) ; -ok( 'BLABLA' eq imap2_folder_name( 'blabla' ), 'imap2_folder_name: blabla' ) ; -ok( 'TEST/TEST/TEST/TEST' eq imap2_folder_name( 'INBOX.TEST.test.Test.tesT' ), 'imap2_folder_name: INBOX.TEST.test.Test.tesT' ) ; -@regextrans2 = ( q{s,(.*),\L$1,} ) ; -ok( 'test/test/test/test' eq imap2_folder_name( 'INBOX.TEST.test.Test.tesT' ), 'imap2_folder_name: INBOX.TEST.test.Test.tesT' ) ; +} + +sub tests_sanitize_subfolder +{ + note( 'Entering tests_sanitize_subfolder()' ) ; + + is( undef, sanitize_subfolder( ), 'sanitize_subfolder: no args => undef' ) ; + is( undef, sanitize_subfolder( '' ), 'sanitize_subfolder: empty => undef' ) ; + is( undef, sanitize_subfolder( ' ' ), 'sanitize_subfolder: blank => undef' ) ; + is( undef, sanitize_subfolder( ' ' ), 'sanitize_subfolder: blanks => undef' ) ; + is( 'abcd', sanitize_subfolder( 'abcd' ), 'sanitize_subfolder: abcd => abcd' ) ; + is( 'ab cd', sanitize_subfolder( ' ab cd ' ), 'sanitize_subfolder: " ab cd " => "ab cd"' ) ; + is( 'abcd', sanitize_subfolder( q{a&~b#\\c[]=d;} ), 'sanitize_subfolder: "a&~b#\\c[]=d;" => "abcd"' ) ; + is( 'aA.b-_ 8c/dD', sanitize_subfolder( 'aA.b-_ 8c/dD' ), 'sanitize_subfolder: aA.b-_ 8c/dD => aA.b-_ 8c/dD' ) ; + note( 'Leaving tests_sanitize_subfolder()' ) ; + return ; +} - note( 'Leaving tests_imap2_folder_name()' ) ; - return ; +sub sanitize_subfolder +{ + my $subfolder = shift ; + if ( ! $subfolder ) + { + return ; + } + # Remove edging blanks + $subfolder =~ s,^ +| +$,,g ; + # Keep only abcd...ABCD...0123... and -_./ + $subfolder =~ tr,-_a-zA-Z0-9./ ,,cd ; + + # A blank subfolder is not a subfolder + if ( ! 
$subfolder ) + { + return ; + } + else + { + return $subfolder ; + } } -# Global variables to remove: -# $debug -# $sync -sub imap2_folder_name { - my $mysync = $sync ; # will be soon next line - #my $mysync = shift ; + +sub tests_add_subfolder1_to_folderrec +{ + note( 'Entering tests_add_subfolder1_to_folderrec()' ) ; + + is( undef, add_subfolder1_to_folderrec( ), 'add_subfolder1_to_folderrec: undef => undef' ) ; + is_deeply( [], [ add_subfolder1_to_folderrec( ) ], 'add_subfolder1_to_folderrec: no args => empty array' ) ; + @folderrec = () ; + my $mysync = {} ; + is_deeply( [ ], [ add_subfolder1_to_folderrec( $mysync ) ], 'add_subfolder1_to_folderrec: empty => empty array' ) ; + is_deeply( [ ], [ @folderrec ], 'add_subfolder1_to_folderrec: empty => empty folderrec' ) ; + $mysync->{ subfolder1 } = 'SUBI' ; + $h1_folders_all{ 'SUBI' } = 1 ; + $mysync->{ h1_prefix } = 'INBOX/' ; + is_deeply( [ 'SUBI' ], [ add_subfolder1_to_folderrec( $mysync ) ], 'add_subfolder1_to_folderrec: SUBI => SUBI' ) ; + is_deeply( [ 'SUBI' ], [ @folderrec ], 'add_subfolder1_to_folderrec: SUBI => folderrec SUBI ' ) ; + + @folderrec = () ; + $mysync->{ subfolder1 } = 'SUBO' ; + is_deeply( [ ], [ add_subfolder1_to_folderrec( $mysync ) ], 'add_subfolder1_to_folderrec: SUBO no exists => empty array' ) ; + is_deeply( [ ], [ @folderrec ], 'add_subfolder1_to_folderrec: SUBO no exists => empty folderrec' ) ; + $h1_folders_all{ 'INBOX/SUBO' } = 1 ; + is_deeply( [ 'INBOX/SUBO' ], [ add_subfolder1_to_folderrec( $mysync ) ], 'add_subfolder1_to_folderrec: SUBO + INBOX/SUBO exists => INBOX/SUBO' ) ; + is_deeply( [ 'INBOX/SUBO' ], [ @folderrec ], 'add_subfolder1_to_folderrec: SUBO + INBOX/SUBO exists => INBOX/SUBO folderrec' ) ; + + note( 'Leaving tests_add_subfolder1_to_folderrec()' ) ; + return ; +} + + +sub add_subfolder1_to_folderrec +{ + my $mysync = shift ; + if ( ! $mysync || ! $mysync->{ subfolder1 } ) + { + return ; + } + + my $subfolder1 = $mysync->{ subfolder1 } ; + my $subfolder1_extended = $mysync->{ h1_prefix } . 
$subfolder1 ;
+
+    if ( exists $h1_folders_all{ $subfolder1 } )
+    {
+        myprint( qq{Acting like --folderrec "$subfolder1"\n} ) ;
+        push @folderrec, $subfolder1 ;
+    }
+    elsif ( exists $h1_folders_all{ $subfolder1_extended } )
+    {
+        myprint( qq{Acting like --folderrec "$subfolder1_extended"\n} ) ;
+        push @folderrec, $subfolder1_extended ;
+    }
+    else
+    {
+        myprint( qq{Neither folder "$subfolder1" nor "$subfolder1_extended" exists on host1\n} ) ;
+    }
+    return @folderrec ;
+}
+
+sub set_regextrans2_for_subfolder2
+{
+    my $mysync = shift ;
+
+
+    unshift @{ $mysync->{ regextrans2 } },
+    q(s,^$mysync->{ h2_prefix }(.*),$mysync->{ h2_prefix }$mysync->{ subfolder2 }$mysync->{ h2_sep }$1,),
+    q(s,^INBOX$,$mysync->{ h2_prefix }$mysync->{ subfolder2 }$mysync->{ h2_sep }INBOX,),
+    q(s,^($mysync->{ h2_prefix }){2},$mysync->{ h2_prefix },);
+
+    #myprint( "@{ $mysync->{ regextrans2 } }\n" ) ;
+    return ;
+}
+
+
+
+# Looks like no globals here
+
+sub tests_imap2_folder_name
+{
+    note( 'Entering tests_imap2_folder_name()' ) ;
+
+    my $mysync = {} ;
+    $mysync->{ h1_prefix } = q{} ;
+    $mysync->{ h2_prefix } = q{} ;
+    $mysync->{ h1_sep } = '/';
+    $mysync->{ h2_sep } = '.';
+
+    $mysync->{ debug } and myprint( <<"EOS"
+prefix1: [$mysync->{ h1_prefix }]
+prefix2: [$mysync->{ h2_prefix }]
+sep1: [$mysync->{ h1_sep }]
+sep2: [$mysync->{ h2_sep }]
+EOS
+) ;
+
+    $mysync->{ fixslash2 } = 0 ;
+    is( q{INBOX}, imap2_folder_name( $mysync, q{} ), 'imap2_folder_name: empty string' ) ;
+    is( 'blabla', imap2_folder_name( $mysync, 'blabla' ), 'imap2_folder_name: blabla' ) ;
+    is('spam.spam', imap2_folder_name( $mysync, 'spam/spam' ), 'imap2_folder_name: spam/spam' ) ;
+
+    is( 'spam/spam', imap2_folder_name( $mysync, 'spam.spam' ), 'imap2_folder_name: spam.spam') ;
+    is( 'spam.spam/spam', imap2_folder_name( $mysync, 'spam/spam.spam' ), 'imap2_folder_name: spam/spam.spam' ) ;
+    is( 's pam.spam/sp am', imap2_folder_name( $mysync, 's pam/spam.sp am' ), 'imap2_folder_name: s pam/spam.sp am' ) ;
+
+    $mysync->{f1f2h}{ 'auto' } = 'moto' ;
+    is( 'moto', imap2_folder_name( $mysync, 'auto' ), 'imap2_folder_name: auto' ) ;
+    $mysync->{f1f2h}{ 'auto/auto' } = 'moto x 2' ;
+    is( 'moto x 2', imap2_folder_name( $mysync, 'auto/auto' ), 'imap2_folder_name: auto/auto' ) ;
+
+    @{ $mysync->{ regextrans2 } } = ( 's,/,X,g' ) ;
+    is( q{INBOX}, imap2_folder_name( $mysync, q{} ), 'imap2_folder_name: empty string [s,/,X,g]' ) ;
+    is( 'blabla', imap2_folder_name( $mysync, 'blabla' ), 'imap2_folder_name: blabla [s,/,X,g]' ) ;
+    is('spam.spam', imap2_folder_name( $mysync, 'spam/spam'), 'imap2_folder_name: spam/spam 
-> spam.spam'); + is('spam_spam', imap2_folder_name( $mysync, 'spam.spam'), 'imap2_folder_name: spam.spam -> spam_spam'); + is('spam.spam_spam', imap2_folder_name( $mysync, 'spam/spam.spam'), 'imap2_folder_name: spam/spam.spam -> spam.spam_spam'); + is('s pam.spam_spa m', imap2_folder_name( $mysync, 's pam/spam.spa m'), 'imap2_folder_name: s pam/spam.spa m -> s pam.spam_spa m'); + + $mysync->{ h1_sep } = '.'; + $mysync->{ h2_sep } = '/'; + is( q{INBOX}, imap2_folder_name( $mysync, q{}), 'imap2_folder_name: empty string'); + is('blabla', imap2_folder_name( $mysync, 'blabla'), 'imap2_folder_name: blabla'); + is('spam.spam', imap2_folder_name( $mysync, 'spam/spam'), 'imap2_folder_name: spam/spam -> spam.spam'); + is('spam/spam', imap2_folder_name( $mysync, 'spam.spam'), 'imap2_folder_name: spam.spam -> spam/spam'); + is('spam.spam/spam', imap2_folder_name( $mysync, 'spam/spam.spam'), 'imap2_folder_name: spam/spam.spam -> spam.spam/spam'); + + + + $mysync->{ fixslash2 } = 0 ; + $mysync->{ h1_prefix } = q{ }; + + is( 'spam.spam/spam', imap2_folder_name( $mysync, 'spam/spam.spam' ), 'imap2_folder_name: spam/spam.spam -> spam.spam/spam' ) ; + is( 'spam.spam/spam', imap2_folder_name( $mysync, ' spam/spam.spam' ), 'imap2_folder_name: spam/spam.spam -> spam.spam/spam' ) ; + + $mysync->{ h1_sep } = '.' ; + $mysync->{ h2_sep } = '/' ; + $mysync->{ h1_prefix } = 'INBOX.' ; + $mysync->{ h2_prefix } = q{} ; + @{ $mysync->{ regextrans2 } } = ( q{s,(.*),\U$1,} ) ; + is( 'BLABLA', imap2_folder_name( $mysync, 'blabla' ), 'imap2_folder_name: blabla' ) ; + is( 'TEST/TEST/TEST/TEST', imap2_folder_name( $mysync, 'INBOX.TEST.test.Test.tesT' ), 'imap2_folder_name: INBOX.TEST.test.Test.tesT' ) ; + @{ $mysync->{ regextrans2 } } = ( q{s,(.*),\L$1,} ) ; + is( 'test/test/test/test', imap2_folder_name( $mysync, 'INBOX.TEST.test.Test.tesT' ), 'imap2_folder_name: INBOX.TEST.test.Test.tesT' ) ; + + # INBOX + $mysync = {} ; + $mysync->{ h1_prefix } = q{Pf1.} ; + $mysync->{ h2_prefix } = q{Pf2/} ; + $mysync->{ h1_sep } = '.'; + $mysync->{ h2_sep } = '/'; + + # + #$mysync->{ debug } = 1 ; + is( 'Pf2/F1/F2/F3', imap2_folder_name( $mysync, 'F1.F2.F3' ), 'imap2_folder_name: F1.F2.F3 -> Pf2/F1/F2/F3' ) ; + is( 'Pf2/F1/INBOX', imap2_folder_name( $mysync, 'F1.INBOX' ), 'imap2_folder_name: F1.INBOX -> Pf2/F1/INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'INBOX' ), 'imap2_folder_name: INBOX -> INBOX' ) ; + + is( 'Pf2/F1/F2/F3', imap2_folder_name( $mysync, 'Pf1.F1.F2.F3' ), 'imap2_folder_name: Pf1.F1.F2.F3 -> Pf2/F1/F2/F3' ) ; + is( 'Pf2/F1/INBOX', imap2_folder_name( $mysync, 'Pf1.F1.INBOX' ), 'imap2_folder_name: Pf1.F1.INBOX -> Pf2/F1/INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.INBOX' ), 'imap2_folder_name: Pf1.INBOX -> INBOX' ) ; # not Pf2/INBOX: Yes I can! 
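+    # The subfolder2 cases below exercise set_regextrans2_for_subfolder2(), which
+    # prepends --regextrans2 rules so that every host1 folder, INBOX included, is
+    # mapped under the configured subfolder2 on host2, keeping the host2 prefix
+    # without doubling it.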
+ + + + # subfolder2 + $mysync = {} ; + $mysync->{ h1_prefix } = q{} ; + $mysync->{ h2_prefix } = q{} ; + $mysync->{ h1_sep } = '/'; + $mysync->{ h2_sep } = '.'; + + + set_regextrans2_for_subfolder2( $mysync ) ; + $mysync->{ subfolder2 } = 'S1.S2' ; + is( 'S1.S2.F1.F2.F3', imap2_folder_name( $mysync, 'F1/F2/F3' ), 'imap2_folder_name: F1/F2/F3 -> S1.S2.F1.F2.F3' ) ; + is( 'S1.S2.INBOX', imap2_folder_name( $mysync, 'INBOX' ), 'imap2_folder_name: F1/F2/F3 -> S1.S2.INBOX' ) ; + + $mysync = {} ; + $mysync->{ h1_prefix } = q{Pf1/} ; + $mysync->{ h2_prefix } = q{Pf2.} ; + $mysync->{ h1_sep } = '/'; + $mysync->{ h2_sep } = '.'; + #$mysync->{ debug } = 1 ; + + set_regextrans2_for_subfolder2( $mysync ) ; + $mysync->{ subfolder2 } = 'Pf2.S1.S2' ; + is( 'Pf2.S1.S2.F1.F2.F3', imap2_folder_name( $mysync, 'F1/F2/F3' ), 'imap2_folder_name: F1/F2/F3 -> Pf2.S1.S2.F1.F2.F3' ) ; + is( 'Pf2.S1.S2.INBOX', imap2_folder_name( $mysync, 'INBOX' ), 'imap2_folder_name: INBOX -> Pf2.S1.S2.INBOX' ) ; + is( 'Pf2.S1.S2.F1.F2.F3', imap2_folder_name( $mysync, 'Pf1/F1/F2/F3' ), 'imap2_folder_name: F1/F2/F3 -> Pf2.S1.S2.F1.F2.F3' ) ; + is( 'Pf2.S1.S2.INBOX', imap2_folder_name( $mysync, 'Pf1/INBOX' ), 'imap2_folder_name: INBOX -> Pf2.S1.S2.INBOX' ) ; + + # subfolder1 + # scenario as the reverse of the previous tests, separators point of vue + $mysync = {} ; + $mysync->{ h1_prefix } = q{Pf1.} ; + $mysync->{ h2_prefix } = q{Pf2/} ; + $mysync->{ h1_sep } = '.'; + $mysync->{ h2_sep } = '/'; + #$mysync->{ debug } = 1 ; + + $mysync->{ subfolder1 } = 'S1.S2' ; + is( 'Pf2/F1/F2/F3', imap2_folder_name( $mysync, 'S1.S2.F1.F2.F3' ), 'imap2_folder_name: S1.S2.F1.F2.F3 -> Pf2/F1/F2/F3' ) ; + is( 'Pf2/F1/F2/F3', imap2_folder_name( $mysync, 'Pf1.S1.S2.F1.F2.F3' ), 'imap2_folder_name: Pf1.S1.S2.F1.F2.F3 -> Pf2/F1/F2/F3' ) ; + + is( 'INBOX', imap2_folder_name( $mysync, 'S1.S2.INBOX' ), 'imap2_folder_name: S1.S2.INBOX -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1.S2' ), 'imap2_folder_name: S1.S2 -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1.S2.' ), 'imap2_folder_name: S1.S2. -> INBOX' ) ; + + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.S1.S2.INBOX' ), 'imap2_folder_name: Pf1.S1.S2.INBOX -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.S1.S2' ), 'imap2_folder_name: Pf1.S1.S2 -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.S1.S2.' ), 'imap2_folder_name: Pf1.S1.S2. -> INBOX' ) ; + + + $mysync->{ subfolder1 } = 'S1.S2.' ; + is( 'Pf2/F1/F2/F3', imap2_folder_name( $mysync, 'S1.S2.F1.F2.F3' ), 'imap2_folder_name: S1.S2.F1.F2.F3 -> Pf2/F1/F2/F3' ) ; + is( 'Pf2/F1/F2/F3', imap2_folder_name( $mysync, 'Pf1.S1.S2.F1.F2.F3' ), 'imap2_folder_name: Pf1.S1.S2.F1.F2.F3 -> Pf2/F1/F2/F3' ) ; + + is( 'INBOX', imap2_folder_name( $mysync, 'S1.S2.INBOX' ), 'imap2_folder_name: S1.S2.INBOX -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1.S2' ), 'imap2_folder_name: S1.S2 -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1.S2.' ), 'imap2_folder_name: S1.S2. -> INBOX' ) ; + + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.S1.S2.INBOX' ), 'imap2_folder_name: Pf1.S1.S2.INBOX -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.S1.S2' ), 'imap2_folder_name: Pf1.S1.S2 -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'Pf1.S1.S2.' ), 'imap2_folder_name: Pf1.S1.S2. 
-> INBOX' ) ; + + + # subfolder1 + # scenario as Gmail + $mysync = {} ; + $mysync->{ h1_prefix } = q{} ; + $mysync->{ h2_prefix } = q{} ; + $mysync->{ h1_sep } = '/'; + $mysync->{ h2_sep } = '/'; + #$mysync->{ debug } = 1 ; + + $mysync->{ subfolder1 } = 'S1/S2' ; + is( 'F1/F2/F3', imap2_folder_name( $mysync, 'S1/S2/F1/F2/F3' ), 'imap2_folder_name: S1/S2/F1/F2/F3 -> F1/F2/F3' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1/S2/INBOX' ), 'imap2_folder_name: S1/S2/INBOX -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1/S2' ), 'imap2_folder_name: S1/S2 -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1/S2/' ), 'imap2_folder_name: S1/S2/ -> INBOX' ) ; + + $mysync->{ subfolder1 } = 'S1/S2/' ; + is( 'F1/F2/F3', imap2_folder_name( $mysync, 'S1/S2/F1/F2/F3' ), 'imap2_folder_name: S1/S2/F1/F2/F3 -> F1/F2/F3' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1/S2/INBOX' ), 'imap2_folder_name: S1/S2/INBOX -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1/S2' ), 'imap2_folder_name: S1/S2 -> INBOX' ) ; + is( 'INBOX', imap2_folder_name( $mysync, 'S1/S2/' ), 'imap2_folder_name: S1/S2/ -> INBOX' ) ; + + + note( 'Leaving tests_imap2_folder_name()' ) ; + return ; +} + + +# Global variables to remove: +# + + +sub imap2_folder_name +{ + my $mysync = shift ; my ( $h1_fold ) = shift ; my ( $h2_fold ) ; if ( $mysync->{f1f2h}{ $h1_fold } ) { $h2_fold = $mysync->{f1f2h}{ $h1_fold } ; - ( $debug or $mysync->{debugfolders} ) and myprint( "f1f2 [$h1_fold] -> [$h2_fold]\n" ) ; + ( $mysync->{ debug } or $mysync->{debugfolders} ) and myprint( "f1f2 [$h1_fold] -> [$h2_fold]\n" ) ; return( $h2_fold ) ; } if ( $mysync->{f1f2auto}{ $h1_fold } ) { $h2_fold = $mysync->{f1f2auto}{ $h1_fold } ; - ( $debug or $mysync->{debugfolders} ) and myprint( "automap [$h1_fold] -> [$h2_fold]\n" ) ; + ( $mysync->{ debug } or $mysync->{debugfolders} ) and myprint( "automap [$h1_fold] -> [$h2_fold]\n" ) ; return( $h2_fold ) ; } + if ( $mysync->{ subfolder1 } ) + { + my $esc_h1_sep = "\\" . $mysync->{ h1_sep } ; + # case where subfolder1 has the sep1 at the end, then remove it + my $part_to_removed = remove_last_char_if_is( $mysync->{ subfolder1 }, $mysync->{ h1_sep } ) ; + # remove the subfolder1 part and the sep1 if present after + $h1_fold =~ s{$part_to_removed($esc_h1_sep)?}{} ; + #myprint( "h1_fold=$h1_fold\n" ) ; + } + + if ( ( '' eq $h1_fold ) or ( $mysync->{ h1_prefix } eq $h1_fold ) ) + { + $h1_fold = 'INBOX' ; + } + $h2_fold = prefix_seperator_invertion( $mysync, $h1_fold ) ; - $h2_fold = regextrans2( $h2_fold ) ; + $h2_fold = regextrans2( $mysync, $h2_fold ) ; return( $h2_fold ) ; } -sub tests_prefix_seperator_invertion { - undef $h1_sep; - undef $h2_sep ; - + +sub tests_remove_last_char_if_is +{ + note( 'Entering tests_remove_last_char_if_is()' ) ; + + is( undef, remove_last_char_if_is( ), 'remove_last_char_if_is: no args => undef' ) ; + is( '', remove_last_char_if_is( '' ), 'remove_last_char_if_is: empty => empty' ) ; + is( '', remove_last_char_if_is( '', 'Z' ), 'remove_last_char_if_is: empty Z => empty' ) ; + is( '', remove_last_char_if_is( 'Z', 'Z' ), 'remove_last_char_if_is: Z Z => empty' ) ; + is( 'abc', remove_last_char_if_is( 'abcZ', 'Z' ), 'remove_last_char_if_is: abcZ Z => abc' ) ; + is( 'abcY', remove_last_char_if_is( 'abcY', 'Z' ), 'remove_last_char_if_is: abcY Z => abcY' ) ; + note( 'Leaving tests_remove_last_char_if_is()' ) ; + return ; +} + + + + +sub remove_last_char_if_is +{ + my $string = shift ; + my $char = shift ; + + if ( ! defined $string ) + { + return ; + } + + if ( ! 
defined $char ) + { + return $string ; + } + + my $last_char = substr $string, -1 ; + if ( $char eq $last_char ) + { + chop $string ; + return $string ; + } + else + { + return $string ; + } +} + +sub tests_prefix_seperator_invertion +{ + note( 'Entering tests_prefix_seperator_invertion()' ) ; + is( undef, prefix_seperator_invertion( ), 'prefix_seperator_invertion: no args => undef' ) ; is( q{}, prefix_seperator_invertion( undef, q{} ), 'prefix_seperator_invertion: empty string => empty string' ) ; is( 'lalala', prefix_seperator_invertion( undef, 'lalala' ), 'prefix_seperator_invertion: lalala => lalala' ) ; @@ -5668,103 +7190,111 @@ sub tests_prefix_seperator_invertion { is( 'lal.ala', prefix_seperator_invertion( undef, 'lal.ala' ), 'prefix_seperator_invertion: lal.ala => lal.ala' ) ; is( '////', prefix_seperator_invertion( undef, '////' ), 'prefix_seperator_invertion: //// => ////' ) ; is( '.....', prefix_seperator_invertion( undef, '.....' ), 'prefix_seperator_invertion: ..... => .....' ) ; - + my $mysync = { h1_prefix => '', h2_prefix => '', h1_sep => '/', h2_sep => '/', } ; - + is( q{}, prefix_seperator_invertion( $mysync, q{} ), 'prefix_seperator_invertion: $mysync empty string => empty string' ) ; is( 'lalala', prefix_seperator_invertion( $mysync, 'lalala' ), 'prefix_seperator_invertion: $mysync lalala => lalala' ) ; is( 'lal/ala', prefix_seperator_invertion( $mysync, 'lal/ala' ), 'prefix_seperator_invertion: $mysync lal/ala => lal/ala' ) ; is( 'lal.ala', prefix_seperator_invertion( $mysync, 'lal.ala' ), 'prefix_seperator_invertion: $mysync lal.ala => lal.ala' ) ; is( '////', prefix_seperator_invertion( $mysync, '////' ), 'prefix_seperator_invertion: $mysync //// => ////' ) ; is( '.....', prefix_seperator_invertion( $mysync, '.....' ), 'prefix_seperator_invertion: $mysync ..... => .....' ) ; - + $mysync = { h1_prefix => 'PPP', h2_prefix => 'QQQ', h1_sep => 's', h2_sep => 't', } ; - + is( q{QQQ}, prefix_seperator_invertion( $mysync, q{} ), 'prefix_seperator_invertion: PPPQQQst empty string => QQQ' ) ; is( 'QQQlalala', prefix_seperator_invertion( $mysync, 'lalala' ), 'prefix_seperator_invertion: PPPQQQst lalala => QQQlalala' ) ; is( 'QQQlal/ala', prefix_seperator_invertion( $mysync, 'lal/ala' ), 'prefix_seperator_invertion: PPPQQQst lal/ala => QQQlal/ala' ) ; is( 'QQQlal.ala', prefix_seperator_invertion( $mysync, 'lal.ala' ), 'prefix_seperator_invertion: PPPQQQst lal.ala => QQQlal.ala' ) ; is( 'QQQ////', prefix_seperator_invertion( $mysync, '////' ), 'prefix_seperator_invertion: PPPQQQst //// => QQQ////' ) ; is( 'QQQ.....', prefix_seperator_invertion( $mysync, '.....' ), 'prefix_seperator_invertion: PPPQQQst ..... => QQQ.....' 
) ; - + is( 'QQQPlalala', prefix_seperator_invertion( $mysync, 'PPPPlalala' ), 'prefix_seperator_invertion: PPPQQQst PPPPlalala => QQQPlalala' ) ; is( 'QQQ', prefix_seperator_invertion( $mysync, 'PPP' ), 'prefix_seperator_invertion: PPPQQQst PPP => QQQ' ) ; is( 'QQQttt', prefix_seperator_invertion( $mysync, 'sss' ), 'prefix_seperator_invertion: PPPQQQst sss => QQQttt' ) ; is( 'QQQt', prefix_seperator_invertion( $mysync, 's' ), 'prefix_seperator_invertion: PPPQQQst s => QQQt' ) ; is( 'QQQtAAAtBBB', prefix_seperator_invertion( $mysync, 'PPPsAAAsBBB' ), 'prefix_seperator_invertion: PPPQQQst PPPsAAAsBBB => QQQtAAAtBBB' ) ; - + + note( 'Leaving tests_prefix_seperator_invertion()' ) ; return ; } -# Global variables to remove: -# $h1_sep -# $h2_sep -# $debug +# Global variables to remove: -sub prefix_seperator_invertion { + +sub prefix_seperator_invertion +{ my $mysync = shift ; my $h1_fold = shift ; my $h2_fold ; if ( not defined $h1_fold ) { return ; } - + my $my_h1_prefix = $mysync->{ h1_prefix } || q{} ; my $my_h2_prefix = $mysync->{ h2_prefix } || q{} ; - my $my_h1_sep = $h1_sep || $mysync->{ h1_sep } || '/' ; - my $my_h2_sep = $h2_sep || $mysync->{ h2_sep } || '/' ; - + my $my_h1_sep = $mysync->{ h1_sep } || '/' ; + my $my_h2_sep = $mysync->{ h2_sep } || '/' ; + # first we remove the prefix $h1_fold =~ s/^\Q$my_h1_prefix\E//x ; - ( $debug or $mysync->{debugfolders} ) and myprint( "removed host1 prefix: [$h1_fold]\n" ) ; - $h2_fold = separator_invert( $h1_fold, $my_h1_sep, $my_h2_sep ) ; - ( $debug or $mysync->{debugfolders} ) and myprint( "inverted separators: [$h2_fold]\n" ) ; + ( $mysync->{ debug } or $mysync->{debugfolders} ) and myprint( "removed host1 prefix: [$h1_fold]\n" ) ; + $h2_fold = separator_invert( $mysync, $h1_fold, $my_h1_sep, $my_h2_sep ) ; + ( $mysync->{ debug } or $mysync->{debugfolders} ) and myprint( "inverted separators: [$h2_fold]\n" ) ; + # Adding the prefix supplied by namespace or the --prefix2 option - $h2_fold = $my_h2_prefix . $h2_fold - unless( ( $my_h2_prefix eq 'INBOX' . $my_h2_sep ) and ( $h2_fold =~ m/^INBOX$/xi ) ) ; - ( $debug or $mysync->{debugfolders} ) and myprint( "added host2 prefix: [$h2_fold]\n" ) ; + # except for INBOX or Inbox + if ( $h2_fold !~ m/^INBOX$/xi ) + { + $h2_fold = $my_h2_prefix . $h2_fold ; + } + + ( $mysync->{ debug } or $mysync->{debugfolders} ) and myprint( "added host2 prefix: [$h2_fold]\n" ) ; return( $h2_fold ) ; } -sub tests_separator_invert { +sub tests_separator_invert +{ note( 'Entering tests_separator_invert()' ) ; - $fixslash2 = 0 ; + my $mysync = {} ; + $mysync->{ fixslash2 } = 0 ; ok( not( defined separator_invert( ) ), 'separator_invert: no args' ) ; ok( not( defined separator_invert( q{} ) ), 'separator_invert: not enough args' ) ; ok( not( defined separator_invert( q{}, q{} ) ), 'separator_invert: not enough args' ) ; - ok( q{} eq separator_invert( q{}, q{}, q{} ), 'separator_invert: 3 empty strings' ) ; - ok( 'lalala' eq separator_invert( 'lalala', q{}, q{} ), 'separator_invert: empty separator' ) ; - ok( 'lalala' eq separator_invert( 'lalala', '/', '/' ), 'separator_invert: same separator /' ) ; - ok( 'lal/ala' eq separator_invert( 'lal/ala', '/', '/' ), 'separator_invert: same separator / 2' ) ; - ok( 'lal.ala' eq separator_invert( 'lal/ala', '/', '.' ), 'separator_invert: separators /.' 
) ; - ok( 'lal/ala' eq separator_invert( 'lal.ala', '.', '/' ), 'separator_invert: separators ./' ) ; - ok( 'la.l/ala' eq separator_invert( 'la/l.ala', '.', '/' ), 'separator_invert: separators ./' ) ; + ok( q{} eq separator_invert( $mysync, q{}, q{}, q{} ), 'separator_invert: 3 empty strings' ) ; + ok( 'lalala' eq separator_invert( $mysync, 'lalala', q{}, q{} ), 'separator_invert: empty separator' ) ; + ok( 'lalala' eq separator_invert( $mysync, 'lalala', '/', '/' ), 'separator_invert: same separator /' ) ; + ok( 'lal/ala' eq separator_invert( $mysync, 'lal/ala', '/', '/' ), 'separator_invert: same separator / 2' ) ; + ok( 'lal.ala' eq separator_invert( $mysync, 'lal/ala', '/', '.' ), 'separator_invert: separators /.' ) ; + ok( 'lal/ala' eq separator_invert( $mysync, 'lal.ala', '.', '/' ), 'separator_invert: separators ./' ) ; + ok( 'la.l/ala' eq separator_invert( $mysync, 'la/l.ala', '.', '/' ), 'separator_invert: separators ./' ) ; - ok( 'l/al.ala' eq separator_invert( 'l.al/ala', '/', '.' ), 'separator_invert: separators /.' ) ; - $fixslash2 = 1 ; - ok( 'l_al.ala' eq separator_invert( 'l.al/ala', '/', '.' ), 'separator_invert: separators /.' ) ; + ok( 'l/al.ala' eq separator_invert( $mysync, 'l.al/ala', '/', '.' ), 'separator_invert: separators /.' ) ; + $mysync->{ fixslash2 } = 1 ; + ok( 'l_al.ala' eq separator_invert( $mysync, 'l.al/ala', '/', '.' ), 'separator_invert: separators /.' ) ; note( 'Leaving tests_separator_invert()' ) ; return ; } -# Global variables to remove: -# $fixslash2 -sub separator_invert { - my( $h1_fold, $h1_separator, $h2_separator ) = @_ ; +# Global variables to remove: +# +sub separator_invert +{ + my( $mysync, $h1_fold, $h1_separator, $h2_separator ) = @_ ; - return( undef ) if ( not defined $h1_fold or not defined $h1_separator or not defined $h2_separator ) ; + return( undef ) if ( not all_defined( $mysync, $h1_fold, $h1_separator, $h2_separator ) ) ; # The separator we hope we'll never encounter: 00000000 == 0x00 my $o_sep = "\000" ; @@ -5772,27 +7302,29 @@ sub separator_invert { $h2_fold =~ s,\Q$h2_separator,$o_sep,xg ; $h2_fold =~ s,\Q$h1_separator,$h2_separator,xg ; $h2_fold =~ s,\Q$o_sep,$h1_separator,xg ; - $h2_fold =~ s,/,_,xg if( $fixslash2 and '/' ne $h2_separator and '/' eq $h1_separator ) ; + $h2_fold =~ s,/,_,xg if( $mysync->{ fixslash2 } and '/' ne $h2_separator and '/' eq $h1_separator ) ; return( $h2_fold ) ; } -sub regextrans2 { - my( $h2_fold ) = @_ ; +sub regextrans2 +{ + my( $mysync, $h2_fold ) = @_ ; # Transforming the folder name by the --regextrans2 option(s) - foreach my $regextrans2 ( @regextrans2 ) { + foreach my $regextrans2 ( @{ $mysync->{ regextrans2 } } ) { my $h2_fold_before = $h2_fold ; my $ret = eval "\$h2_fold =~ $regextrans2 ; 1 " ; - ( $debug or $sync->{debugfolders} ) and myprint( "[$h2_fold_before] -> [$h2_fold] using regextrans2 [$regextrans2]\n" ) ; + ( $mysync->{ debug } or $mysync->{debugfolders} ) and myprint( "[$h2_fold_before] -> [$h2_fold] using regextrans2 [$regextrans2]\n" ) ; if ( not ( defined $ret ) or $EVAL_ERROR ) { - die_clean( "error: eval regextrans2 '$regextrans2': $EVAL_ERROR\n" ) ; + exit_clean( $mysync, $EX_USAGE, "error: eval regextrans2 '$regextrans2': $EVAL_ERROR\n" ) ; } } return( $h2_fold ) ; } -sub tests_decompose_regex { +sub tests_decompose_regex +{ note( 'Entering tests_decompose_regex()' ) ; ok( 1, 'decompose_regex 1' ) ; @@ -5803,7 +7335,8 @@ sub tests_decompose_regex { return ; } -sub decompose_regex { +sub decompose_regex +{ my $regex = shift ; my( $left_part, $right_part ) ; @@ 
-5813,7 +7346,8 @@ sub decompose_regex { } -sub foldersizes { +sub foldersizes +{ my ( $side, $imap, $search_cmd, $abletosearch, @folders ) = @_ ; my $total_size = 0 ; @@ -5859,7 +7393,7 @@ sub foldersizes { if ( $nb_msgs > 0 and @msgs ) { if ( $abletosearch ) { if ( ! $imap->fetch_hash( \@msgs, 'RFC822.SIZE', $hash_ref) ) { - my $error = "$side failure with fetch_hash: $EVAL_ERROR" ; + my $error = "$side failure with fetch_hash: $EVAL_ERROR\n" ; errors_incr( $sync, $error ) ; return ; } @@ -5867,7 +7401,7 @@ sub foldersizes { my $uidnext = $imap->uidnext( $folder ) || $uidnext_default ; my $fetch_hash_uids = $fetch_hash_set || "1:$uidnext" ; if ( ! $imap->fetch_hash( $fetch_hash_uids, 'RFC822.SIZE', $hash_ref ) ) { - my $error = "$side failure with fetch_hash: $EVAL_ERROR" ; + my $error = "$side failure with fetch_hash: $EVAL_ERROR\n" ; errors_incr( $sync, $error ) ; return ; } @@ -5894,7 +7428,8 @@ sub foldersizes { return( $total_nb, $total_size ) ; } -sub timenext { +sub timenext +{ my ( $timenow, $timediff ) ; # $timebefore is global, beurk ! $timenow = time ; @@ -5903,7 +7438,8 @@ sub timenext { return( $timediff ) ; } -sub timesince { +sub timesince +{ my $timeinit = shift || 0 ; my ( $timenow, $timediff ) ; $timenow = time ; @@ -5915,7 +7451,8 @@ sub timesince { -sub tests_flags_regex { +sub tests_flags_regex +{ note( 'Entering tests_flags_regex()' ) ; ok( q{} eq flags_regex(q{} ), 'flags_regex, null string q{}' ) ; @@ -5926,7 +7463,7 @@ sub tests_flags_regex { @regexflag = ( 's/NonJunk//g' ) ; ok( q{\Seen $Spam} eq flags_regex( q{\Seen NonJunk $Spam} ), q{flags_regex, remove NonJunk: 's/NonJunk//g'} ) ; - @regexflag = ( q's/\$Spam//g' ) ; + @regexflag = ( q{s/\$Spam//g} ) ; ok( q{\Seen NonJunk } eq flags_regex( q{\Seen NonJunk $Spam} ), q{flags_regex, remove $Spam: 's/\$Spam//g'} ) ; @regexflag = ( 's/\\\\Seen//g' ) ; @@ -5964,41 +7501,9 @@ sub tests_flags_regex { ok( 'Keep1 Keep3 ' eq flags_regex('RE1 Keep1 RE2 Keep3 RE3 RE4 RE5 '), 'Keep only regex' ) ; @regexflag = ( 's/(.*)/$1 jrdH8u/' ) ; - ok('REM REM REM REM REM jrdH8u' eq flags_regex('REM REM REM REM REM'), q{Keep only regex 's/(.*)/\$1 jrdH8u/'} ) ; + ok('REM REM REM REM REM jrdH8u' eq flags_regex('REM REM REM REM REM'), q{Add jrdH8u 's/(.*)/\$1 jrdH8u/'} ) ; @regexflag = ('s/jrdH8u *//'); - ok('REM REM REM REM REM ' eq flags_regex('REM REM REM REM REM jrdH8u'), q{Keep only regex s/jrdH8u *//} ) ; - - @regexflag = ( - 's/(.*)/$1 jrdH8u/', - 's/.*?(Keep1|Keep2|Keep3|jrdH8u)/$1 /g', - 's/(Keep1|Keep2|Keep3|jrdH8u) (?!(Keep1|Keep2|Keep3|jrdH8u)).*/$1 /g', - 's/jrdH8u *//' - ); - - ok('Keep1 Keep2 ' eq flags_regex('REM Keep1 REM Keep2 REM'), q{Keep only regex 'REM Keep1 REM Keep2 REM'} ) ; - ok('Keep1 Keep2 ' eq flags_regex('Keep1 REM Keep2 REM'), 'Keep only regex'); - ok('Keep1 Keep2 ' eq flags_regex('REM Keep1 Keep2 REM'), 'Keep only regex'); - ok('Keep1 Keep2 ' eq flags_regex('REM Keep1 REM Keep2'), 'Keep only regex'); - ok('Keep1 Keep2 Keep3 ' eq flags_regex('REM Keep1 REM Keep2 REM REM Keep3 REM'), 'Keep only regex'); - ok('Keep1 ' eq flags_regex('REM REM Keep1 REM REM REM '), 'Keep only regex'); - ok('Keep1 Keep3 ' eq flags_regex('RE1 Keep1 RE2 Keep3 RE3 RE4 RE5 '), 'Keep only regex'); - ok(q{} eq flags_regex('REM REM REM REM REM'), 'Keep only regex'); - - @regexflag = ( - 's/(.*)/$1 jrdH8u/', - 's/.*?(\\\\Seen|\\\\Answered|\\\\Flagged|\\\\Deleted|\\\\Draft|jrdH8u)/$1 /g', - 's/(\\\\Seen|\\\\Answered|\\\\Flagged|\\\\Deleted|\\\\Draft|jrdH8u) (?!(\\\\Seen|\\\\Answered|\\\\Flagged|\\\\Deleted|\\\\Draft|jrdH8u)).*/$1 /g', 
- 's/jrdH8u *//' - ); - - ok('\\Deleted \\Answered ' - eq flags_regex('Blabla $Junk \\Deleted machin \\Answered truc'), 'Keep only regex: Exchange case' ) ; - ok( q{} eq flags_regex( q{} ), 'Keep only regex: Exchange case, null string' ) ; - ok( q{} - eq flags_regex('Blabla $Junk machin truc'), 'Keep only regex: Exchange case, no accepted flags' ) ; - ok( '\\Deleted \\Answered \\Draft \\Flagged ' - eq flags_regex('\\Deleted \\Answered \\Draft \\Flagged '), 'Keep only regex: Exchange case' ) ; - + ok('REM REM REM REM REM ' eq flags_regex('REM REM REM REM REM jrdH8u'), q{Remove jrdH8u s/jrdH8u *//} ) ; @regexflag = ( 's/.*?(?:(\\\\(?:Answered|Flagged|Deleted|Seen|Draft)\s?)|$)/defined($1)?$1:q()/eg' @@ -6018,11 +7523,33 @@ sub tests_flags_regex { eq flags_regex('\\Deleted \\Answered \\Draft \\Flagged '), 'Keep only regex: Exchange case (Phil)' ) ; + @regexflag = ( 's/\\\\Flagged//g' ) ; + + is('\Deleted \Answered \Draft ', + flags_regex('\\Deleted \\Answered \\Draft \\Flagged '), + 'flags_regex: remove \Flagged 1' ) ; + is('\\Deleted \\Answered \\Draft', + flags_regex('\\Deleted \\Flagged \\Answered \\Draft'), + 'flags_regex: remove \Flagged 2' ) ; + + # I didn't understand why it gives \F + # https://perldoc.perl.org/perlrebackslash.html + # \F Foldcase till \E. Not in []. + # https://perldoc.perl.org/functions/fc.html + + # \F Not available in old Perl so I comment the test + + # @regexflag = ( 's/\\Flagged/X/g' ) ; + #is('\Deleted FX \Answered \FX \Draft \FX', + #flags_regex( '\Deleted Flagged \Answered \Flagged \Draft \Flagged' ), + # 'flags_regex: remove \Flagged 3 mistery...' ) ; + note( 'Leaving tests_flags_regex()' ) ; return ; } -sub flags_regex { +sub flags_regex +{ my ( $h1_flags ) = @_ ; foreach my $regexflag ( @regexflag ) { my $h1_flags_orig = $h1_flags ; @@ -6037,12 +7564,13 @@ sub flags_regex { return( $h1_flags ) ; } -sub acls_sync { +sub acls_sync +{ my($h1_fold, $h2_fold) = @_ ; if ( $syncacls ) { - my $h1_hash = $imap1->getacl($h1_fold) + my $h1_hash = $sync->{imap1}->getacl($h1_fold) or myprint( "Could not getacl for $h1_fold: $EVAL_ERROR\n" ) ; - my $h2_hash = $imap2->getacl($h2_fold) + my $h2_hash = $sync->{imap2}->getacl($h2_fold) or myprint( "Could not getacl for $h2_fold: $EVAL_ERROR\n" ) ; my %users = map { ($_, 1) } ( keys %{ $h1_hash} , keys %{ $h2_hash } ) ; foreach my $user (sort keys %users ) { @@ -6052,7 +7580,7 @@ sub acls_sync { $h1_hash->{$user} eq $h2_hash->{$user}); unless ($sync->{dry}) { myprint( "setting acl $h2_fold $user $acl\n" ) ; - $imap2->setacl($h2_fold, $user, $acl) + $sync->{imap2}->setacl($h2_fold, $user, $acl) or myprint( "Could not set acl: $EVAL_ERROR\n" ) ; } } @@ -6061,7 +7589,8 @@ sub acls_sync { } -sub tests_permanentflags { +sub tests_permanentflags +{ note( 'Entering tests_permanentflags()' ) ; my $string; @@ -6081,12 +7610,13 @@ sub tests_permanentflags { return ; } -sub permanentflags { +sub permanentflags +{ my @lines = @_ ; foreach my $line (@lines) { if ( $line =~ m{\[PERMANENTFLAGS\s\(([^)]+?)\)\]}x ) { - ( $debugflags or $debug ) and myprint( "permanentflags: $line" ) ; + ( $debugflags or $sync->{ debug } ) and myprint( "permanentflags: $line" ) ; my $permanentflags = $1 ; if ( $permanentflags =~ m{\\\*}x ) { $permanentflags = q{} ; @@ -6097,7 +7627,8 @@ sub permanentflags { return( q{} ) ; } -sub tests_flags_filter { +sub tests_flags_filter +{ note( 'Entering tests_flags_filter()' ) ; ok( '\Seen' eq flags_filter('\Seen', '\Draft \Seen \Answered'), 'flags_filter ' ); @@ -6113,7 +7644,8 @@ sub tests_flags_filter { return ; } 
-sub flags_filter { +sub flags_filter +{ my( $flags, $allowed_flags ) = @_ ; my @flags = split /\s+/x, $flags ; @@ -6125,7 +7657,8 @@ sub flags_filter { return( $flags_out ) ; } -sub flagscase { +sub flagscase +{ my $flags = shift ; my @flags = split /\s+/x, $flags ; @@ -6137,7 +7670,8 @@ sub flagscase { return( $flags_out ) ; } -sub tests_flagscase { +sub tests_flagscase +{ note( 'Entering tests_flagscase()' ) ; ok( '\Seen' eq flagscase( '\Seen' ), 'flagscase: \Seen -> \Seen' ) ; @@ -6155,7 +7689,8 @@ sub tests_flagscase { -sub ucsecond { +sub ucsecond +{ my $string = shift ; my $output ; @@ -6167,8 +7702,10 @@ sub ucsecond { } -sub tests_ucsecond { +sub tests_ucsecond +{ note( 'Entering tests_ucsecond()' ) ; + ok( 'aBcde' eq ucsecond( 'abcde' ), 'ucsecond: abcde -> aBcde' ) ; ok( 'ABCDE' eq ucsecond( 'ABCDE' ), 'ucsecond: ABCDE -> ABCDE' ) ; ok( 'ABCDE' eq ucsecond( 'AbCDE' ), 'ucsecond: AbCDE -> ABCDE' ) ; @@ -6183,7 +7720,8 @@ sub tests_ucsecond { } -sub select_msgs { +sub select_msgs +{ my ( $imap, $msgs_all_hash_ref, $search_cmd, $abletosearch, $folder ) = @_ ; my ( @msgs ) ; @@ -6196,7 +7734,8 @@ sub select_msgs { } -sub select_msgs_by_search { +sub select_msgs_by_search +{ my ( $imap, $msgs_all_hash_ref, $search_cmd, $folder ) = @_ ; my ( @msgs, @msgs_all ) ; @@ -6234,7 +7773,8 @@ sub select_msgs_by_search { } -sub select_msgs_by_fetch { +sub select_msgs_by_fetch +{ my ( $imap, $msgs_all_hash_ref, $search_cmd, $folder ) = @_ ; my ( @msgs, @msgs_all, %fetch ) ; @@ -6251,7 +7791,7 @@ sub select_msgs_by_fetch { @msgs_all = sort { $a <=> $b } keys %fetch ; $debugdev and myprint( "Done fetch_hash()\n" ) ; - + return if ( $#msgs_all == 0 && !defined $msgs_all[0] ) ; if ( defined $msgs_all_hash_ref ) { @@ -6286,7 +7826,8 @@ sub select_msgs_by_fetch { return( @msgs ) ; } -sub select_msgs_by_age { +sub select_msgs_by_age +{ my( $imap ) = @_ ; my( @max, @min, @msgs, @inter, @union ) ; @@ -6302,7 +7843,8 @@ sub select_msgs_by_age { return( @msgs ) ; } -sub msgs_from_maxmin { +sub msgs_from_maxmin +{ my( $max_ref, $min_ref ) = @_ ; my( @max, @min, @msgs, @inter, @union ) ; @@ -6325,7 +7867,8 @@ sub msgs_from_maxmin { return( @msgs ) ; } -sub tests_msgs_from_maxmin { +sub tests_msgs_from_maxmin +{ note( 'Entering tests_msgs_from_maxmin()' ) ; my @msgs ; @@ -6346,14 +7889,17 @@ sub tests_msgs_from_maxmin { return ; } -sub tests_info_date_from_uid { - +sub tests_info_date_from_uid +{ + note( 'Entering tests_info_date_from_uid()' ) ; + note( 'Leaving tests_info_date_from_uid()' ) ; return ; } -sub info_date_from_uid { - +sub info_date_from_uid +{ + #my $first_uid = $msgs_all[ 0 ] ; #my $first_idate = $fetch{ $first_uid }->{'INTERNALDATE'} ; #my $first_epoch = epoch( $first_idate ) ; @@ -6362,7 +7908,8 @@ sub info_date_from_uid { } -sub lastuid { +sub lastuid +{ my $imap = shift ; my $folder = shift ; my $lastuid_guess = shift ; @@ -6390,26 +7937,28 @@ sub lastuid { return( $lastuid ) ; } -sub size_filtered { +sub size_filtered +{ my( $h1_size, $h1_msg, $h1_fold, $h2_fold ) = @_ ; $h1_size = 0 if ( ! 
$h1_size ) ; # null if empty or undef - if (defined $maxsize and $h1_size > $maxsize) { - myprint( "msg $h1_fold/$h1_msg skipped ($h1_size exceeds maxsize limit $maxsize bytes)\n" ) ; - $total_bytes_skipped += $h1_size; - $nb_msg_skipped += 1; + if ( defined $sync->{ maxsize } and $h1_size > $sync->{ maxsize } ) { + myprint( "msg $h1_fold/$h1_msg skipped ($h1_size exceeds maxsize limit $sync->{ maxsize } bytes)\n" ) ; + $sync->{ total_bytes_skipped } += $h1_size; + $sync->{ nb_msg_skipped } += 1; return( 1 ) ; } - if (defined $minsize and $h1_size <= $minsize) { + if ( defined $minsize and $h1_size <= $minsize ) { myprint( "msg $h1_fold/$h1_msg skipped ($h1_size smaller than minsize $minsize bytes)\n" ) ; - $total_bytes_skipped += $h1_size; - $nb_msg_skipped += 1; + $sync->{ total_bytes_skipped } += $h1_size; + $sync->{ nb_msg_skipped } += 1; return( 1 ) ; } return( 0 ) ; } -sub message_exists { +sub message_exists +{ my( $imap, $msg ) = @_ ; return( 1 ) if not $imap->Uid( ) ; @@ -6420,11 +7969,28 @@ sub message_exists { return( 0 ) ; } -sub copy_message { + +# Globals +# $sync->{ total_bytes_skipped } +# $sync->{ nb_msg_skipped } +# $mysync->{ h1_nb_msg_processed } +sub stats_update_skip_message +{ + my $mysync = shift ; # to be used + my $h1_size = shift ; + + $mysync->{ total_bytes_skipped } += $h1_size ; + $mysync->{ nb_msg_skipped } += 1 ; + $mysync->{ h1_nb_msg_processed } +=1 ; + return ; +} + +sub copy_message +{ # copy my ( $mysync, $h1_msg, $h1_fold, $h2_fold, $h1_fir_ref, $permanentflags2, $cache_dir ) = @_ ; - ( $debug or $mysync->{dry}) and myprint( "msg $h1_fold/$h1_msg copying to $h2_fold $mysync->{dry_message}\n" ) ; + ( $mysync->{ debug } or $mysync->{dry}) and myprint( "msg $h1_fold/$h1_msg copying to $h2_fold $mysync->{dry_message}\n" ) ; my $h1_size = $h1_fir_ref->{$h1_msg}->{'RFC822.SIZE'} || 0 ; my $h1_flags = $h1_fir_ref->{$h1_msg}->{'FLAGS'} || q{} ; @@ -6432,33 +7998,28 @@ sub copy_message { if ( size_filtered( $h1_size, $h1_msg, $h1_fold, $h2_fold ) ) { - $h1_nb_msg_processed +=1 ; + $mysync->{ h1_nb_msg_processed } +=1 ; return ; } debugsleep( $mysync ) ; myprint( "- msg $h1_fold/$h1_msg S[$h1_size] F[$h1_flags] I[$h1_idate] has RFC822.SIZE null!\n" ) if ( ! 
$h1_size ) ; - - if ( $checkmessageexists and not message_exists( $imap1, $h1_msg ) ) { - $total_bytes_skipped += $h1_size; - $nb_msg_skipped += 1; - $h1_nb_msg_processed +=1 ; + if ( $checkmessageexists and not message_exists( $mysync->{imap1}, $h1_msg ) ) { + stats_update_skip_message( $mysync, $h1_size ) ; return ; } - myprint( debugmemory( $sync, " at C1" ) ) ; + myprint( debugmemory( $mysync, " at C1" ) ) ; my ( $string, $string_len ) ; ( $string_len ) = message_for_host2( $mysync, $h1_msg, $h1_fold, $h1_size, $h1_flags, $h1_idate, $h1_fir_ref, \$string ) ; - myprint( debugmemory( $sync, " at C2" ) ) ; + myprint( debugmemory( $mysync, " at C2" ) ) ; # not defined or empty $string if ( ( not $string ) or ( not $string_len ) ) { myprint( "- msg $h1_fold/$h1_msg skipped.\n" ) ; - $total_bytes_skipped += $h1_size; - $nb_msg_skipped += 1; - $h1_nb_msg_processed += 1 ; + stats_update_skip_message( $mysync, $h1_size ) ; return ; } @@ -6466,39 +8027,40 @@ sub copy_message { if ( ( defined $maxlinelength ) or ( defined $minmaxlinelength ) ) { $string = linelengthstuff( $string, $h1_fold, $h1_msg, $string_len, $h1_size, $h1_flags, $h1_idate ) ; if ( not defined $string ) { - $h1_nb_msg_processed +=1 ; - $total_bytes_skipped += $h1_size ; - $nb_msg_skipped += 1 ; + stats_update_skip_message( $mysync, $h1_size ) ; return ; } } my $h1_date = date_for_host2( $h1_msg, $h1_idate ) ; - ( $debug or $debugflags ) and - myprint( "Host1 flags init msg $h1_fold/$h1_msg date [$h1_date] flags [$h1_flags] size [$h1_size]\n" ) ; + ( $mysync->{ debug } or $debugflags ) and + myprint( "Host1: flags init msg $h1_fold/$h1_msg date [$h1_date] flags [$h1_flags] size [$h1_size]\n" ) ; $h1_flags = flags_for_host2( $h1_flags, $permanentflags2 ) ; - ( $debug or $debugflags ) and - myprint( "Host1 flags filt msg $h1_fold/$h1_msg date [$h1_date] flags [$h1_flags] size [$h1_size]\n" ) ; + ( $mysync->{ debug } or $debugflags ) and + myprint( "Host1: flags filt msg $h1_fold/$h1_msg date [$h1_date] flags [$h1_flags] size [$h1_size]\n" ) ; + + $h1_date = undef if ( $h1_date eq q{} ) ; + + my $new_id = append_message_on_host2( $mysync, \$string, $h1_fold, $h1_msg, $string_len, $h2_fold, $h1_size, $h1_flags, $h1_date, $cache_dir ) ; - $h1_date = undef if ($h1_date eq q{}); - my $new_id = append_message_on_host2( \$string, $h1_fold, $h1_msg, $string_len, $h2_fold, $h1_size, $h1_flags, $h1_date, $cache_dir ) ; if ( $new_id and $syncflagsaftercopy ) { - sync_flags_after_copy( $h1_fold, $h1_msg, $h1_flags, $h2_fold, $new_id, $permanentflags2 ) ; + sync_flags_after_copy( $mysync, $h1_fold, $h1_msg, $h1_flags, $h2_fold, $new_id, $permanentflags2 ) ; } - myprint( debugmemory( $sync, " at C3" ) ) ; + myprint( debugmemory( $mysync, " at C3" ) ) ; return $new_id ; } -sub linelengthstuff { +sub linelengthstuff +{ my( $string, $h1_fold, $h1_msg, $string_len, $h1_size, $h1_flags, $h1_idate ) = @_ ; my $maxlinelength_string = max_line_length( $string ) ; $debugmaxlinelength and myprint( "msg $h1_fold/$h1_msg maxlinelength: $maxlinelength_string\n" ) ; @@ -6531,7 +8093,8 @@ sub linelengthstuff { } -sub message_for_host2 { +sub message_for_host2 +{ # global variable list: # @skipmess @@ -6555,16 +8118,19 @@ sub message_for_host2 { my ( $mysync, $h1_msg, $h1_fold, $h1_size, $h1_flags, $h1_idate, $h1_fir_ref, $string_ref ) = @_ ; # abort when missing a parameter - if ( (!$sync) or (!$h1_msg) or (!$h1_fold) or (!$h1_size) or (!defined $h1_flags) or (!defined $h1_idate) or (!$h1_fir_ref) or (!$string_ref) ) { + if ( ( ! $mysync ) or ( ! 
$h1_msg ) or ( ! $h1_fold ) or ( ! defined $h1_size ) + or ( ! defined $h1_flags) or ( ! defined $h1_idate ) + or ( ! $h1_fir_ref) or ( ! $string_ref ) ) + { return ; } - myprint( debugmemory( $sync, " at M1" ) ) ; + myprint( debugmemory( $mysync, " at M1" ) ) ; - my $imap1 = $mysync->{imap1} ; - my $string_ok = $imap1->message_to_file( $string_ref, $h1_msg ) ; - myprint( debugmemory( $sync, " at M2" ) ) ; + my $string_ok = $mysync->{imap1}->message_to_file( $string_ref, $h1_msg ) ; + + myprint( debugmemory( $mysync, " at M2" ) ) ; my $string_len = length_ref( $string_ref ) ; @@ -6573,10 +8139,9 @@ sub message_for_host2 { # undef or 0 length my $error = join q{}, "- msg $h1_fold/$h1_msg {$string_len} S[$h1_size] F[$h1_flags] I[$h1_idate] could not be fetched: ", - $imap1->LastError || q{}, "\n" ; + $mysync->{imap1}->LastError || q{}, "\n" ; errors_incr( $mysync, $error ) ; - $total_bytes_error += $h1_size if ( $h1_size ) ; - $h1_nb_msg_processed +=1 ; + $mysync->{ h1_nb_msg_processed } +=1 ; return ; } @@ -6621,7 +8186,7 @@ sub message_for_host2 { if ( $mysync->{addheader} and defined $h1_fir_ref->{$h1_msg}->{'NO_HEADER'} ) { my $header = add_header( $h1_msg ) ; - $debug and myprint( "msg $h1_fold/$h1_msg adding custom header [$header]\n" ) ; + $mysync->{ debug } and myprint( "msg $h1_fold/$h1_msg adding custom header [$header]\n" ) ; ${ $string_ref } = $header . "\r\n" . ${ $string_ref } ; } @@ -6633,12 +8198,15 @@ sub message_for_host2 { ${ $string_ref }, "F message content ended on previous line\n", q{=} x $STD_CHAR_PER_LINE, "\n" ) ; - myprint( debugmemory( $sync, " at M3" ) ) ; + myprint( debugmemory( $mysync, " at M3" ) ) ; return $string_len ; } -sub tests_message_for_host2 { + + +sub tests_message_for_host2 +{ note( 'Entering tests_message_for_host2()' ) ; @@ -6647,7 +8215,7 @@ sub tests_message_for_host2 { is( undef, message_for_host2( ), q{message_for_host2: no args} ) ; is( undef, message_for_host2( $mysync, $h1_msg, $h1_fold, $h1_size, $h1_flags, $h1_idate, $h1_fir_ref, $string_ref ), q{message_for_host2: undef args} ) ; - require Test::MockObject ; + require_ok( "Test::MockObject" ) ; my $imapT = Test::MockObject->new( ) ; $mysync->{imap1} = $imapT ; my $string ; @@ -6711,13 +8279,496 @@ sub tests_message_for_host2 { return ; } -sub length_ref { +sub tests_labels_remove_subfolder1 +{ + note( 'Entering tests_labels_remove_subfolder1()' ) ; + is( undef, labels_remove_subfolder1( ), 'labels_remove_subfolder1: no parameters => undef' ) ; + is( 'Blabla', labels_remove_subfolder1( 'Blabla' ), 'labels_remove_subfolder1: one parameter Blabla => Blabla' ) ; + is( 'Blan blue', labels_remove_subfolder1( 'Blan blue' ), 'labels_remove_subfolder1: one parameter Blan blue => Blan blue' ) ; + is( '\Bla "Blan blan" Blabla', labels_remove_subfolder1( '\Bla "Blan blan" Blabla' ), + 'labels_remove_subfolder1: one parameter \Bla "Blan blan" Blabla => \Bla "Blan blan" Blabla' ) ; + + is( 'Bla', labels_remove_subfolder1( 'Subf/Bla', 'Subf' ), 'labels_remove_subfolder1: Subf/Bla Subf => "Bla"' ) ; + + + is( '"\\\\Bla"', labels_remove_subfolder1( '"\\\\Bla"', 'Subf' ), 'labels_remove_subfolder1: "\\\\Bla" Subf => "\\\\Bla"' ) ; + + is( 'Bla Kii', labels_remove_subfolder1( 'Subf/Bla Subf/Kii', 'Subf' ), + 'labels_remove_subfolder1: Subf/Bla Subf/Kii, Subf => "Bla" "Kii"' ) ; + + is( '"\\\\Bla" Kii', labels_remove_subfolder1( '"\\\\Bla" Subf/Kii', 'Subf' ), + 'labels_remove_subfolder1: "\\\\Bla" Subf/Kii Subf => "\\\\Bla" Kii' ) ; + + is( '"Blan blan"', labels_remove_subfolder1( '"Subf/Blan blan"', 
'Subf' ), + 'labels_remove_subfolder1: "Subf/Blan blan" Subf => "Blan blan"' ) ; + + is( '"\\\\Loo" "Blan blan" Kii', labels_remove_subfolder1( '"\\\\Loo" "Subf/Blan blan" Subf/Kii', 'Subf' ), + 'labels_remove_subfolder1: "\\\\Loo" "Subf/Blan blan" Subf/Kii + Subf => "\\\\Loo" "Blan blan" Kii' ) ; + + is( '"\\\\Inbox"', labels_remove_subfolder1( 'Subf/INBOX', 'Subf' ), + 'labels_remove_subfolder1: Subf/INBOX + Subf => "\\\\Inbox"' ) ; + + is( '"\\\\Loo" "Blan blan" Kii "\\\\Inbox"', labels_remove_subfolder1( '"\\\\Loo" "Subf/Blan blan" Subf/Kii Subf/INBOX', 'Subf' ), + 'labels_remove_subfolder1: "\\\\Loo" "Subf/Blan blan" Subf/Kii Subf/INBOX + Subf => "\\\\Loo" "Blan blan" Kii "\\\\Inbox"' ) ; + + + note( 'Leaving tests_labels_remove_subfolder1()' ) ; + return ; +} + + + +sub labels_remove_subfolder1 +{ + my $labels = shift ; + my $subfolder1 = shift ; + + if ( not defined $labels ) { return ; } + if ( not defined $subfolder1 ) { return $labels ; } + + my @labels = quotewords('\s+', 1, $labels ) ; + #myprint( "@labels\n" ) ; + my @labels_subfolder2 ; + + foreach my $label ( @labels ) + { + if ( $label =~ m{zzzzzzzzzz} ) + { + # \Seen \Deleted ... stay the same + push @labels_subfolder2, $label ; + } + else + { + # Remove surrounding quotes if any, to add them again in case of space + $label = join( '', quotewords('\s+', 0, $label ) ) ; + $label =~ s{$subfolder1/?}{} ; + if ( 'INBOX' eq $label ) + { + push @labels_subfolder2, q{"\\\\Inbox"} ; + } + elsif ( $label =~ m{\\} ) + { + push @labels_subfolder2, qq{"\\$label"} ; + } + elsif ( $label =~ m{ } ) + { + push @labels_subfolder2, qq{"$label"} ; + } + else + { + push @labels_subfolder2, $label ; + } + } + } + + my $labels_subfolder2 = join( ' ', sort uniq( @labels_subfolder2 ) ) ; + + return $labels_subfolder2 ; +} + +sub tests_labels_remove_special +{ + note( 'Entering tests_labels_remove_special()' ) ; + + is( undef, labels_remove_special( ), 'labels_remove_special: no parameters => undef' ) ; + is( '', labels_remove_special( '' ), 'labels_remove_special: empty string => empty string' ) ; + is( '', labels_remove_special( '"\\\\Inbox"' ), 'labels_remove_special:"\\\\Inbox" => empty string' ) ; + is( '', labels_remove_special( '"\\\\Inbox" "\\\\Starred"' ), 'labels_remove_special:"\\\\Inbox" "\\\\Starred" => empty string' ) ; + is( 'Bar Foo', labels_remove_special( 'Foo Bar' ), 'labels_remove_special:Foo Bar => Bar Foo' ) ; + is( 'Bar Foo', labels_remove_special( 'Foo Bar "\\\\Inbox"' ), 'labels_remove_special:Foo Bar "\\\\Inbox" => Bar Foo' ) ; + note( 'Leaving tests_labels_remove_special()' ) ; + return ; +} + + + + +sub labels_remove_special +{ + my $labels = shift ; + + if ( not defined $labels ) { return ; } + + my @labels = quotewords('\s+', 1, $labels ) ; + myprint( "labels before remove_non_folded: @labels\n" ) ; + my @labels_remove_special ; + + foreach my $label ( @labels ) + { + if ( $label =~ m{^\"\\\\} ) + { + # not kept + } + else + { + push @labels_remove_special, $label ; + } + } + + my $labels_remove_special = join( ' ', sort @labels_remove_special ) ; + + return $labels_remove_special ; +} + + +sub tests_labels_add_subfolder2 +{ + note( 'Entering tests_labels_add_subfolder2()' ) ; + is( undef, labels_add_subfolder2( ), 'labels_add_subfolder2: no parameters => undef' ) ; + is( 'Blabla', labels_add_subfolder2( 'Blabla' ), 'labels_add_subfolder2: one parameter Blabla => Blabla' ) ; + is( 'Blan blue', labels_add_subfolder2( 'Blan blue' ), 'labels_add_subfolder2: one parameter Blan blue => Blan blue' ) ; + is( '\Bla "Blan 
blan" Blabla', labels_add_subfolder2( '\Bla "Blan blan" Blabla' ), + 'labels_add_subfolder2: one parameter \Bla "Blan blan" Blabla => \Bla "Blan blan" Blabla' ) ; + + is( 'Subf/Bla', labels_add_subfolder2( 'Bla', 'Subf' ), 'labels_add_subfolder2: Bla Subf => "Subf/Bla"' ) ; + + + is( 'Subf/\Bla', labels_add_subfolder2( '\\\\Bla', 'Subf' ), 'labels_add_subfolder2: \Bla Subf => \Bla' ) ; + + is( 'Subf/Bla Subf/Kii', labels_add_subfolder2( 'Bla Kii', 'Subf' ), + 'labels_add_subfolder2: Bla Kii Subf => "Subf/Bla" "Subf/Kii"' ) ; + + is( 'Subf/Kii Subf/\Bla', labels_add_subfolder2( '\\\\Bla Kii', 'Subf' ), + 'labels_add_subfolder2: \Bla Kii Subf => \Bla Subf/Kii' ) ; + + is( '"Subf/Blan blan"', labels_add_subfolder2( '"Blan blan"', 'Subf' ), + 'labels_add_subfolder2: "Blan blan" Subf => "Subf/Blan blan"' ) ; + + is( '"Subf/Blan blan" Subf/Kii Subf/\Loo', labels_add_subfolder2( '\\\\Loo "Blan blan" Kii', 'Subf' ), + 'labels_add_subfolder2: \Loo "Blan blan" Kii + Subf => "Subf/Blan blan" Subf/Kii Subf/\Loo' ) ; + + # "\\Inbox" is special, add to subfolder INBOX also because Gmail will but ... + is( '"Subf/\\\\Inbox" Subf/INBOX', labels_add_subfolder2( '"\\\\Inbox"', 'Subf' ), + 'labels_add_subfolder2: "\\\\Inbox" Subf => "Subf/\\\\Inbox" Subf/INBOX' ) ; + + # but not with INBOX folder + is( '"Subf/\\\\Inbox"', labels_add_subfolder2( '"\\\\Inbox"', 'Subf', 'INBOX' ), + 'labels_add_subfolder2: "\\\\Inbox" Subf INBOX => "Subf/\\\\Inbox"' ) ; + + # two times => one time + is( '"Subf/\\\\Inbox" Subf/INBOX', labels_add_subfolder2( '"\\\\Inbox" "\\\\Inbox"', 'Subf' ), + 'labels_add_subfolder2: "\\\\Inbox" "\\\\Inbox" Subf => "Subf/\\\\Inbox"' ) ; + + is( '"Subf/\\\\Starred"', labels_add_subfolder2( '"\\\\Starred"', 'Subf' ), + 'labels_add_subfolder2: "\\\\Starred" Subf => "Subf/\\\\Starred"' ) ; + + note( 'Leaving tests_labels_add_subfolder2()' ) ; + return ; +} + +sub labels_add_subfolder2 +{ + my $labels = shift ; + my $subfolder2 = shift ; + my $h1_folder = shift || q{} ; + + if ( not defined $labels ) { return ; } + if ( not defined $subfolder2 ) { return $labels ; } + + # Isn't it messy? + if ( 'INBOX' eq $h1_folder ) + { + $labels .= ' "\\\\Inbox"' ; + } + + my @labels = uniq( quotewords('\s+', 1, $labels ) ) ; + myprint( "labels before subfolder2: @labels\n" ) ; + my @labels_subfolder2 ; + + + foreach my $label ( @labels ) + { + # Isn't it more messy? + if ( ( q{"\\\\Inbox"} eq $label ) and ( 'INBOX' ne $h1_folder ) ) + { + if ( $subfolder2 =~ m{ } ) + { + push @labels_subfolder2, qq{"$subfolder2/INBOX"} ; + } + else + { + push @labels_subfolder2, "$subfolder2/INBOX" ; + } + } + if ( $label =~ m{^\"\\\\} ) + { + # \Seen \Deleted ... 
stay the same + #push @labels_subfolder2, $label ; + # Remove surrounding quotes if any, to add them again + $label = join( '', quotewords('\s+', 0, $label ) ) ; + push @labels_subfolder2, qq{"$subfolder2/\\$label"} ; + + } + else + { + # Remove surrounding quotes if any, to add them again in case of space + $label = join( '', quotewords('\s+', 0, $label ) ) ; + if ( $label =~ m{ } ) + { + push @labels_subfolder2, qq{"$subfolder2/$label"} ; + } + else + { + push @labels_subfolder2, "$subfolder2/$label" ; + } + } + } + + my $labels_subfolder2 = join( ' ', sort @labels_subfolder2 ) ; + + return $labels_subfolder2 ; +} + +sub tests_labels +{ + note( 'Entering tests_labels()' ) ; + + is( undef, labels( ), 'labels: no parameters => undef' ) ; + is( undef, labels( undef ), 'labels: undef => undef' ) ; + require_ok( "Test::MockObject" ) ; + my $myimap = Test::MockObject->new( ) ; + + $myimap->mock( 'fetch_hash', + sub { + return( + { '1' => { + 'X-GM-LABELS' => '\Seen Blabla' + } + } + ) ; + } + ) ; + $myimap->mock( 'Debug' , sub { } ) ; + $myimap->mock( 'Unescape', sub { return Mail::IMAPClient::Unescape( @_ ) } ) ; # real one + + is( undef, labels( $myimap ), 'labels: one parameter => undef' ) ; + is( '\Seen Blabla', labels( $myimap, '1' ), 'labels: $mysync UID_1 => \Seen Blabla' ) ; + + note( 'Leaving tests_labels()' ) ; + return ; +} + +sub labels +{ + my ( $myimap, $uid ) = @ARG ; + + if ( not all_defined( $myimap, $uid ) ) { + return ; + } + + my $hash = $myimap->fetch_hash( [ $uid ], 'X-GM-LABELS' ) ; + + my $labels = $hash->{ $uid }->{ 'X-GM-LABELS' } ; + #$labels = $myimap->Unescape( $labels ) ; + return $labels ; +} + +sub tests_synclabels +{ + note( 'Entering tests_synclabels()' ) ; + + is( undef, synclabels( ), 'synclabels: no parameters => undef' ) ; + is( undef, synclabels( undef ), 'synclabels: undef => undef' ) ; + my $mysync ; + is( undef, synclabels( $mysync ), 'synclabels: var undef => undef' ) ; + + require_ok( "Test::MockObject" ) ; + $mysync = {} ; + + my $myimap1 = Test::MockObject->new( ) ; + $myimap1->mock( 'fetch_hash', + sub { + return( + { '1' => { + 'X-GM-LABELS' => '\Seen Blabla' + } + } + ) ; + } + ) ; + $myimap1->mock( 'Debug', sub { } ) ; + $myimap1->mock( 'Unescape', sub { return Mail::IMAPClient::Unescape( @_ ) } ) ; # real one + + my $myimap2 = Test::MockObject->new( ) ; + + $myimap2->mock( 'store', + sub { + return 1 ; + } + ) ; + + + $mysync->{imap1} = $myimap1 ; + $mysync->{imap2} = $myimap2 ; + + is( undef, synclabels( $mysync ), 'synclabels: fresh $mysync => undef' ) ; + + is( undef, synclabels( $mysync, '1' ), 'synclabels: $mysync UID_1 alone => undef' ) ; + is( 1, synclabels( $mysync, '1', '2' ), 'synclabels: $mysync UID_1 UID_2 => 1' ) ; + + note( 'Leaving tests_synclabels()' ) ; + return ; +} + + +sub synclabels +{ + my( $mysync, $uid1, $uid2 ) = @ARG ; + + if ( not all_defined( $mysync, $uid1, $uid2 ) ) { + return ; + } + my $myimap1 = $mysync->{ 'imap1' } || return ; + my $myimap2 = $mysync->{ 'imap2' } || return ; + + $mysync->{debuglabels} and $myimap1->Debug( 1 ) ; + my $labels1 = labels( $myimap1, $uid1 ) ; + $mysync->{debuglabels} and $myimap1->Debug( 0 ) ; + $mysync->{debuglabels} and myprint( "Host1 labels: $labels1\n" ) ; + + + + if ( $mysync->{ subfolder1 } and $labels1 ) + { + $labels1 = labels_remove_subfolder1( $labels1, $mysync->{ subfolder1 } ) ; + $mysync->{debuglabels} and myprint( "Host1 labels with subfolder1: $labels1\n" ) ; + } + + if ( $mysync->{ subfolder2 } and $labels1 ) + { + $labels1 = labels_add_subfolder2( $labels1, 
$mysync->{ subfolder2 } ) ; + $mysync->{debuglabels} and myprint( "Host1 labels with subfolder2: $labels1\n" ) ; + } + + my $store ; + if ( $labels1 and not $mysync->{ dry } ) + { + $mysync->{ debuglabels } and $myimap2->Debug( 1 ) ; + $store = $myimap2->store( $uid2, "X-GM-LABELS ($labels1)" ) ; + $mysync->{ debuglabels } and $myimap2->Debug( 0 ) ; + } + return $store ; +} + + +sub tests_resynclabels +{ + note( 'Entering tests_resynclabels()' ) ; + + is( undef, resynclabels( ), 'resynclabels: no parameters => undef' ) ; + is( undef, resynclabels( undef ), 'resynclabels: undef => undef' ) ; + my $mysync ; + is( undef, resynclabels( $mysync ), 'resynclabels: var undef => undef' ) ; + + my ( $h1_fir_ref, $h2_fir_ref ) ; + + $mysync->{ debuglabels } = 1 ; + $h1_fir_ref->{ 11 }->{ 'X-GM-LABELS' } = '\Seen Baa Kii' ; + $h2_fir_ref->{ 22 }->{ 'X-GM-LABELS' } = '\Seen Baa Kii' ; + + # labels are equal + is( 1, resynclabels( $mysync, 11, 22, $h1_fir_ref, $h2_fir_ref ), + 'resynclabels: $mysync UID_1 UID_2 labels are equal => 1' ) ; + + # labels are different + $h2_fir_ref->{ 22 }->{ 'X-GM-LABELS' } = '\Seen Zuu' ; + require_ok( "Test::MockObject" ) ; + my $myimap2 = Test::MockObject->new( ) ; + $myimap2->mock( 'store', + sub { + return 1 ; + } + ) ; + $myimap2->mock( 'Debug', sub { } ) ; + $mysync->{imap2} = $myimap2 ; + + is( 1, resynclabels( $mysync, 11, 22, $h1_fir_ref, $h2_fir_ref ), + 'resynclabels: $mysync UID_1 UID_2 labels are not equal => store => 1' ) ; + + note( 'Leaving tests_resynclabels()' ) ; + return ; +} + + + +sub resynclabels +{ + my( $mysync, $uid1, $uid2, $h1_fir_ref, $h2_fir_ref, $h1_folder ) = @ARG ; + + if ( not all_defined( $mysync, $uid1, $uid2, $h1_fir_ref, $h2_fir_ref ) ) { + return ; + } + + my $labels1 = $h1_fir_ref->{ $uid1 }->{ 'X-GM-LABELS' } || q{} ; + my $labels2 = $h2_fir_ref->{ $uid2 }->{ 'X-GM-LABELS' } || q{} ; + + if ( $mysync->{ subfolder1 } and $labels1 ) + { + $labels1 = labels_remove_subfolder1( $labels1, $mysync->{ subfolder1 } ) ; + } + + if ( $mysync->{ subfolder2 } and $labels1 ) + { + $labels1 = labels_add_subfolder2( $labels1, $mysync->{ subfolder2 }, $h1_folder ) ; + $labels2 = labels_remove_special( $labels2 ) ; + } + $mysync->{ debuglabels } and myprint( "Host1 labels fixed: $labels1\n" ) ; + $mysync->{ debuglabels } and myprint( "Host2 labels : $labels2\n" ) ; + + my $store ; + if ( $labels1 eq $labels2 ) + { + # no sync needed + $mysync->{ debuglabels } and myprint( "Labels are already equal\n" ) ; + return 1 ; + } + elsif ( not $mysync->{ dry } ) + { + # sync needed + $mysync->{debuglabels} and $mysync->{imap2}->Debug( 1 ) ; + $store = $mysync->{imap2}->store( $uid2, "X-GM-LABELS ($labels1)" ) ; + $mysync->{debuglabels} and $mysync->{imap2}->Debug( 0 ) ; + } + + return $store ; +} + +sub tests_uniq +{ + note( 'Entering tests_uniq()' ) ; + + is( 0, uniq( ), 'uniq: undef => 0' ) ; + is_deeply( [ 'one' ], [ uniq( 'one' ) ], 'uniq: one => one' ) ; + is_deeply( [ 'one' ], [ uniq( 'one', 'one' ) ], 'uniq: one one => one' ) ; + is_deeply( [ 'one', 'two' ], [ uniq( 'one', 'one', 'two', 'one', 'two' ) ], 'uniq: one one two one two => one two' ) ; + note( 'Leaving tests_uniq()' ) ; + return ; +} + +sub uniq +{ + my @list = @ARG ; + my %seen = ( ) ; + my @uniq = ( ) ; + foreach my $item ( @list ) { + if ( ! $seen{ $item } ) { + $seen{ $item } = 1 ; + push( @uniq, $item ) ; + } + } + return @uniq ; +} + + +sub length_ref +{ my $string_ref = shift ; my $string_len = defined ${ $string_ref } ? 
length( ${ $string_ref } ) : q{} ; # length or empty string return $string_len ; } -sub tests_length_ref { +sub tests_length_ref +{ note( 'Entering tests_length_ref()' ) ; my $notdefined ; @@ -6733,29 +8784,31 @@ sub tests_length_ref { return ; } -sub date_for_host2 { +sub date_for_host2 +{ my( $h1_msg, $h1_idate ) = @_ ; my $h1_date = q{} ; if ( $syncinternaldates ) { $h1_date = $h1_idate ; - $debug and myprint( "internal date from host1: [$h1_date]\n" ) ; + $sync->{ debug } and myprint( "internal date from host1: [$h1_date]\n" ) ; $h1_date = good_date( $h1_date ) ; - $debug and myprint( "internal date from host1: [$h1_date] (fixed)\n" ) ; + $sync->{ debug } and myprint( "internal date from host1: [$h1_date] (fixed)\n" ) ; } if ( $idatefromheader ) { - $h1_date = $imap1->get_header( $h1_msg, 'Date' ) ; - $debug and myprint( "header date from host1: [$h1_date]\n" ) ; + $h1_date = $sync->{imap1}->get_header( $h1_msg, 'Date' ) ; + $sync->{ debug } and myprint( "header date from host1: [$h1_date]\n" ) ; $h1_date = good_date( $h1_date ) ; - $debug and myprint( "header date from host1: [$h1_date] (fixed)\n" ) ; + $sync->{ debug } and myprint( "header date from host1: [$h1_date] (fixed)\n" ) ; } return( $h1_date ) ; } -sub flags_for_host2 { +sub flags_for_host2 +{ my( $h1_flags, $permanentflags2 ) = @_ ; # RFC 2060: This flag can not be altered by any client $h1_flags =~ s@\\Recent\s?@@xgi ; @@ -6769,7 +8822,8 @@ sub flags_for_host2 { return( $h1_flags ) ; } -sub subject { +sub subject +{ my $string = shift ; my $subject = q{} ; @@ -6782,7 +8836,8 @@ sub subject { return( $subject ) ; } -sub tests_subject { +sub tests_subject +{ note( 'Entering tests_subject()' ) ; ok( q{} eq subject( q{} ), 'subject: null') ; @@ -6833,32 +8888,27 @@ EOF # GlobVar -# $sync # $max_msg_size_in_bytes -# $imap2 -# $imap1 -# $total_bytes_error -# $h1_nb_msg_processed # $h2_uidguess # ... # # -sub append_message_on_host2 { - my( $string_ref, $h1_fold, $h1_msg, $string_len, $h2_fold, $h1_size, $h1_flags, $h1_date, $cache_dir ) = @_ ; - myprint( debugmemory( $sync, " at A1" ) ) ; +sub append_message_on_host2 +{ + my( $mysync, $string_ref, $h1_fold, $h1_msg, $string_len, $h2_fold, $h1_size, $h1_flags, $h1_date, $cache_dir ) = @_ ; + myprint( debugmemory( $mysync, " at A1" ) ) ; my $new_id ; - if ( ! $sync->{dry} ) { + if ( ! $mysync->{dry} ) { $max_msg_size_in_bytes = max( $h1_size, $max_msg_size_in_bytes ) ; - $new_id = $imap2->append_string( $h2_fold, ${ $string_ref }, $h1_flags, $h1_date ) ; - myprint( debugmemory( $sync, " at A2" ) ) ; + $new_id = $mysync->{imap2}->append_string( $h2_fold, ${ $string_ref }, $h1_flags, $h1_date ) ; + myprint( debugmemory( $mysync, " at A2" ) ) ; if ( ! 
$new_id){ my $subject = subject( ${ $string_ref } ) ; - my $error_imap = $imap2->LastError || q{} ; - my $error = "- msg $h1_fold/$h1_msg {$string_len} could not append ( Subject:[$subject], Date:[$h1_date], Size:[$h1_size] ) to folder $h2_fold: $error_imap\n" ; - errors_incr( $sync, $error ) ; - $total_bytes_error += $h1_size; - $h1_nb_msg_processed +=1 ; + my $error_imap = $mysync->{imap2}->LastError || q{} ; + my $error = "- msg $h1_fold/$h1_msg {$string_len} could not append ( Subject:[$subject], Date:[$h1_date], Size:[$h1_size], Flags:[$h1_flags] ) to folder $h2_fold: $error_imap\n" ; + errors_incr( $mysync, $error ) ; + $mysync->{ h1_nb_msg_processed } +=1 ; return ; } else{ @@ -6866,44 +8916,47 @@ sub append_message_on_host2 { # $new_id is an id if the IMAP server has the # UIDPLUS capability else just a ref if ( $new_id !~ m{^\d+$}x ) { - $new_id = lastuid( $imap2, $h2_fold, $h2_uidguess ) ; + $new_id = lastuid( $mysync->{imap2}, $h2_fold, $h2_uidguess ) ; } + if ( $mysync->{ synclabels } ) { synclabels( $mysync, $h1_msg, $new_id ) } $h2_uidguess += 1 ; - $sync->{total_bytes_transferred} += $h1_size ; - $sync->{nb_msg_transferred} += 1 ; - $h1_nb_msg_processed +=1 ; + $mysync->{total_bytes_transferred} += $h1_size ; + $mysync->{nb_msg_transferred} += 1 ; + $mysync->{ h1_nb_msg_processed } +=1 ; - my $time_spent = timesince( $sync->{begin_transfer_time} ) ; - my $rate = bytes_display_string( $sync->{total_bytes_transferred} / $time_spent ) ; + my $time_spent = timesince( $mysync->{begin_transfer_time} ) ; + my $rate = bytes_display_string( $mysync->{total_bytes_transferred} / $time_spent ) ; my $eta = eta( $time_spent, - $h1_nb_msg_processed, $h1_nb_msg_start, $sync->{nb_msg_transferred} ) ; - my $amount_transferred = bytes_display_string( $sync->{total_bytes_transferred} ) ; - myprintf( "msg %s/%-19s copied to %s/%-10s %.2f msgs/s %s/s %s copied %s\n", - $h1_fold, "$h1_msg {$string_len}", $h2_fold, $new_id, $sync->{nb_msg_transferred}/$time_spent, $rate, + $mysync->{ h1_nb_msg_processed }, $h1_nb_msg_start, $mysync->{nb_msg_transferred} ) ; + my $amount_transferred = bytes_display_string( $mysync->{total_bytes_transferred} ) ; + myprintf( "msg %s/%-19s copied to %s/%-10s %.2f msgs/s %s/s %s copied %s\n", + $h1_fold, "$h1_msg {$string_len}", $h2_fold, $new_id, $mysync->{nb_msg_transferred}/$time_spent, $rate, $amount_transferred, $eta ); - sleep_if_needed( $sync ) ; + sleep_if_needed( $mysync ) ; if ( $usecache and $cacheaftercopy and $new_id =~ m{^\d+$}x ) { $debugcache and myprint( "touch $cache_dir/${h1_msg}_$new_id\n" ) ; touch( "$cache_dir/${h1_msg}_$new_id" ) or croak( "Couldn't touch $cache_dir/${h1_msg}_$new_id" ) ; } - if ( $delete1 ) { - delete_message_on_host1( $h1_msg, $h1_fold ) ; + if ( $mysync->{ delete1 } ) { + delete_message_on_host1( $mysync, $h1_fold, $mysync->{ expungeaftereach }, $h1_msg ) ; } #myprint( "PRESS ENTER" ) and my $a = <> ; + return( $new_id ) ; } } else{ $nb_msg_skipped_dry_mode += 1 ; - $h1_nb_msg_processed +=1 ; + $mysync->{ h1_nb_msg_processed } +=1 ; } return ; } -sub tests_sleep_if_needed { +sub tests_sleep_if_needed +{ note( 'Entering tests_sleep_if_needed()' ) ; is( undef, sleep_if_needed( ), 'sleep_if_needed: no args => undef' ) ; @@ -6925,7 +8978,7 @@ sub tests_sleep_if_needed { $mysync->{maxsleep} = 0.1 ; $mysync->{begin_transfer_time} = time - 2 ; # 2 s before again is( '0.10', sleep_if_needed( $mysync ), 'sleep_if_needed: total_bytes_transferred == 4000 since 2s but maxsleep 0.1s => sleep 0.1s' ) ; - + $mysync->{maxbytesafter} = 4000 ; 
$mysync->{begin_transfer_time} = time - 2 ; # 2 s before again is( 0, sleep_if_needed( $mysync ), 'sleep_if_needed: maxbytesafter == total_bytes_transferred => no sleep => 0' ) ; @@ -6935,7 +8988,8 @@ sub tests_sleep_if_needed { } -sub sleep_if_needed { +sub sleep_if_needed +{ my( $mysync ) = shift ; if ( ! $mysync ) { @@ -6947,11 +9001,11 @@ sub sleep_if_needed { ) { return ; } - + $mysync->{maxsleep} = defined $mysync->{maxsleep} ? $mysync->{maxsleep} : $MAX_SLEEP ; # Must be positive $mysync->{maxsleep} = max( 0, $mysync->{maxsleep} ) ; - + my $time_spent = timesince( $mysync->{begin_transfer_time} ) ; my $sleep_max_messages = sleep_max_messages( $mysync->{nb_msg_transferred}, $time_spent, $mysync->{maxmessagespersecond} ) ; @@ -6975,7 +9029,8 @@ sub sleep_if_needed { return 0 ; } -sub sleep_max_messages { +sub sleep_max_messages +{ # how long we have to sleep to go under max_messages_per_second my( $nb_msg_transferred, $time_spent, $maxmessagespersecond ) = @_ ; if ( ( not defined $maxmessagespersecond ) or $maxmessagespersecond <= 0 ) { return( 0 ) } ; @@ -6985,7 +9040,8 @@ sub sleep_max_messages { } -sub tests_sleep_max_messages { +sub tests_sleep_max_messages +{ note( 'Entering tests_sleep_max_messages()' ) ; ok( 0 == sleep_max_messages( 4, 2, undef ), 'sleep_max_messages: maxmessagespersecond = undef') ; @@ -7000,7 +9056,8 @@ sub tests_sleep_max_messages { } -sub sleep_max_bytes { +sub sleep_max_bytes +{ # how long we have to sleep to go under max_bytes_per_second my( $total_bytes_to_consider, $time_spent, $maxbytespersecond ) = @_ ; $total_bytes_to_consider ||= 0 ; @@ -7014,7 +9071,8 @@ sub sleep_max_bytes { } -sub tests_sleep_max_bytes { +sub tests_sleep_max_bytes +{ note( 'Entering tests_sleep_max_bytes()' ) ; ok( 0 == sleep_max_bytes( 4000, 2, undef ), 'sleep_max_bytes: maxbytespersecond == undef => sleep 0' ) ; @@ -7031,24 +9089,154 @@ sub tests_sleep_max_bytes { } +sub delete_message_on_host1 +{ + my( $mysync, $h1_fold, $expunge, @h1_msg ) = @_ ; + if ( ! $mysync->{ delete1 } ) { return ; } + if ( ! @h1_msg ) { return ; } + delete_messages_on_any( + $mysync, + $mysync->{imap1}, + "Host1: $h1_fold", + $expunge, + $split1, + @h1_msg ) ; + return ; +} +sub tests_operators_and_exclam_precedence +{ + note( 'Entering tests_operators_and_exclam_precedence()' ) ; -# 6 GlobVar: $sync $imap1 $h1_nb_msg_deleted $expunge1 -sub delete_message_on_host1 { - my( $h1_msg, $h1_fold ) = @_ ; + is( 1, ! 0, 'tests_operators_and_exclam_precedence: ! 0 => 1' ) ; + is( "", ! 1, 'tests_operators_and_exclam_precedence: ! 1 => ""' ) ; + is( 1, not( 0 ), 'tests_operators_and_exclam_precedence: not( 0 ) => 1' ) ; + is( "", not( 1 ), 'tests_operators_and_exclam_precedence: not( 1 ) => ""' ) ; + + # I wrote those tests to avoid perlcrit "Mixed high and low-precedence booleans" + # and change sub delete_messages_on_any() but got 4 more warnings... So now commented. + + #is( 0, ( ! 0 and 0 ), 'tests_operators_and_exclam_precedence: ! 0 and 0 ) => 0' ) ; + #is( 1, ( ! 0 and 1 ), 'tests_operators_and_exclam_precedence: ! 0 and 1 ) => 1' ) ; + #is( "", ( ! 1 and 0 ), 'tests_operators_and_exclam_precedence: ! 1 and 0 ) => ""' ) ; + #is( "", ( ! 1 and 1 ), 'tests_operators_and_exclam_precedence: ! 1 and 1 ) => ""' ) ; + + is( 0, ( ! 0 && 0 ), 'tests_operators_and_exclam_precedence: ! 0 && 0 ) => 0' ) ; + is( 1, ( ! 0 && 1 ), 'tests_operators_and_exclam_precedence: ! 0 && 1 ) => 1' ) ; + is( "", ( ! 1 && 0 ), 'tests_operators_and_exclam_precedence: ! 1 && 0 ) => ""' ) ; + is( "", ( ! 
1 && 1 ), 'tests_operators_and_exclam_precedence: ! 1 && 1 ) => ""' ) ; + + is( 2, ( ! 0 && 2 ), 'tests_operators_and_exclam_precedence: ! 0 && 2 ) => 1' ) ; + + note( 'Leaving tests_operators_and_exclam_precedence()' ) ; + return ; +} + +sub delete_messages_on_any +{ + my( $mysync, $imap, $hostX_folder, $expunge, $split, @messages ) = @_ ; my $expunge_message = q{} ; - $expunge_message = 'and expunged' if ( $expungeaftereach and $expunge1 ) ; - myprint( "Host1 msg $h1_fold/$h1_msg marked deleted $expunge_message $sync->{dry_message}\n" ) ; - if ( ! $sync->{dry} ) { - $imap1->delete_message( $h1_msg ) ; - $h1_nb_msg_deleted += 1 ; - $imap1->expunge( ) if ( $expungeaftereach and $expunge1 ) ; + + my $dry_message = $mysync->{ dry_message } ; + $expunge_message = 'and expunged' if ( $expunge ) ; + # "Host1: msg " + + $imap->Debug( 1 ) ; + + while ( my @messages_part = splice @messages, 0, $split ) + { + foreach my $message ( @messages_part ) + { + myprint( "$hostX_folder/$message marking deleted $expunge_message $dry_message\n" ) ; + } + if ( ! $mysync->{dry} && @messages_part ) + { + my $nb_deleted = $imap->delete_message( $imap->Range( @messages_part ) ) ; + if ( defined $nb_deleted ) + { + $mysync->{ h1_nb_msg_deleted } += $nb_deleted ; + } + else + { + my $error_imap = $imap->LastError || q{} ; + my $error = join( q{}, "$hostX_folder folder, could not delete ", + scalar @messages_part, ' messages: ', $error_imap, "\n" ) ; + errors_incr( $mysync, $error ) ; + } + } } + + if ( $expunge ) { + uidexpunge_or_expunge( $mysync, $imap, @messages ) ; + } + + $imap->Debug( 0 ) ; + return ; } -sub eta { +sub tests_uidexpunge_or_expunge +{ + note( 'Entering tests_uidexpunge_or_expunge()' ) ; + + + is( undef, uidexpunge_or_expunge( ), 'uidexpunge_or_expunge: no args => undef' ) ; + my $mysync ; + is( undef, uidexpunge_or_expunge( $mysync ), 'uidexpunge_or_expunge: undef args => undef' ) ; + $mysync = {} ; + is( undef, uidexpunge_or_expunge( $mysync ), 'uidexpunge_or_expunge: arg empty => undef' ) ; + my $imap ; + is( undef, uidexpunge_or_expunge( $mysync, $imap ), 'uidexpunge_or_expunge: undef Mail-IMAPClient instance => undef' ) ; + + require_ok( "Test::MockObject" ) ; + $imap = Test::MockObject->new( ) ; + is( undef, uidexpunge_or_expunge( $mysync, $imap ), 'uidexpunge_or_expunge: no message (1) to uidexpunge => undef' ) ; + + my @messages = ( ) ; + is( undef, uidexpunge_or_expunge( $mysync, $imap, @messages ), 'uidexpunge_or_expunge: no message (2) to uidexpunge => undef' ) ; + + @messages = ( '2', '1' ) ; + $imap->mock( 'uidexpunge', sub { return ; } ) ; + $imap->mock( 'expunge', sub { return ; } ) ; + is( undef, uidexpunge_or_expunge( $mysync, $imap, @messages ), 'uidexpunge_or_expunge: uidexpunge failure => expunge failure => undef' ) ; + + $imap->mock( 'expunge', sub { return 1 ; } ) ; + is( 1, uidexpunge_or_expunge( $mysync, $imap, @messages ), 'uidexpunge_or_expunge: uidexpunge failure => expunge ok => 1' ) ; + + $imap->mock( 'uidexpunge', sub { return 1 ; } ) ; + is( 1, uidexpunge_or_expunge( $mysync, $imap, @messages ), 'uidexpunge_or_expunge: messages to uidexpunge ok => 1' ) ; + + note( 'Leaving tests_uidexpunge_or_expunge()' ) ; + return ; +} + +sub uidexpunge_or_expunge +{ + my $mysync = shift ; + my $imap = shift ; + my @messages = @ARG ; + + if ( ! $imap ) { return ; } ; + if ( ! 
@messages ) { return ; } ; + + # Doing uidexpunge + my @uidexpunge_result = $imap->uidexpunge( @messages ) ; + if ( @uidexpunge_result ) { + return 1 ; + } + # Failure so doing expunge + my $expunge_result = $imap->expunge( ) ; + if ( $expunge_result ) { + return 1 ; + } + # bad trip + return ; +} + + +sub eta +{ my( $my_time_spent, $h1_nb_processed, $my_h1_nb_msg_start, $nb_transferred ) = @_ ; return( q{} ) if not $foldersizes ; @@ -7058,7 +9246,8 @@ sub eta { return( mysprintf( 'ETA: %s %1.0f s %s/%s msgs left', $eta_date, $time_remaining, $nb_msg_remaining, $my_h1_nb_msg_start ) ) ; } -sub time_remaining { +sub time_remaining +{ my( $my_time_spent, $h1_nb_processed, $my_h1_nb_msg_start, $nb_transferred ) = @_ ; @@ -7067,7 +9256,8 @@ sub time_remaining { } -sub tests_time_remaining { +sub tests_time_remaining +{ note( 'Entering tests_time_remaining()' ) ; @@ -7080,7 +9270,8 @@ sub tests_time_remaining { } -sub cache_map { +sub cache_map +{ my ( $cache_files_ref, $h1_msgs_ref, $h2_msgs_ref ) = @_; my ( %map1_2, %map2_1, %done2 ) ; @@ -7121,7 +9312,8 @@ sub cache_map { return( \%map1_2, \%map2_1) ; } -sub tests_cache_map { +sub tests_cache_map +{ note( 'Entering tests_cache_map()' ) ; #$debugcache = 1 ; @@ -7163,14 +9355,16 @@ sub tests_cache_map { } -sub cache_dir_fix { +sub cache_dir_fix +{ my $cache_dir = shift ; $cache_dir =~ s/([;<>\*\|`&\$!#\(\)\[\]\{\}:'"\\])/\\$1/xg ; #myprint( "cache_dir_fix: $cache_dir\n" ) ; return( $cache_dir ) ; } -sub tests_cache_dir_fix { +sub tests_cache_dir_fix +{ note( 'Entering tests_cache_dir_fix()' ) ; ok( 'lalala' eq cache_dir_fix('lalala'), 'cache_dir_fix: lalala -> lalala' ); @@ -7187,14 +9381,16 @@ sub tests_cache_dir_fix { return ; } -sub cache_dir_fix_win { +sub cache_dir_fix_win +{ my $cache_dir = shift ; $cache_dir =~ s/(\[|\])/[$1]/xg ; #myprint( "cache_dir_fix_win: $cache_dir\n" ) ; return( $cache_dir ) ; } -sub tests_cache_dir_fix_win { +sub tests_cache_dir_fix_win +{ note( 'Entering tests_cache_dir_fix_win()' ) ; ok( 'lalala' eq cache_dir_fix_win('lalala'), 'cache_dir_fix_win: lalala -> lalala' ); @@ -7207,13 +9403,14 @@ sub tests_cache_dir_fix_win { -sub get_cache { +sub get_cache +{ my ( $cache_dir, $h1_msgs_ref, $h2_msgs_ref, $h1_msgs_all_hash_ref, $h2_msgs_all_hash_ref ) = @_; $debugcache and myprint( "Entering get_cache\n" ) ; -d $cache_dir or return( undef ); # exit if cache directory doesn't exist - $debugcache and myprint( "cache_dir : $cache_dir\n" ) ; + $debugcache and myprint( "cache_dir : $cache_dir\n" ) ; if ( 'MSWin32' ne $OSNAME ) { @@ -7239,7 +9436,8 @@ sub get_cache { } -sub tests_get_cache { +sub tests_get_cache +{ note( 'Entering tests_get_cache()' ) ; ok( not( get_cache('/cache_no_exist') ), 'get_cache: /cache_no_exist' ); @@ -7330,7 +9528,8 @@ sub tests_get_cache { return ; } -sub match_a_cache_file { +sub match_a_cache_file +{ my $file = shift ; my ( $cache_uid1, $cache_uid2 ) ; @@ -7342,7 +9541,8 @@ sub match_a_cache_file { return( $cache_uid1, $cache_uid2 ) ; } -sub tests_match_a_cache_file { +sub tests_match_a_cache_file +{ note( 'Entering tests_match_a_cache_file()' ) ; my ( $tuid1, $tuid2 ) ; @@ -7378,7 +9578,8 @@ sub tests_match_a_cache_file { return ; } -sub clean_cache { +sub clean_cache +{ my ( $cache_files_ref, $cache_1_2_ref, $h1_msgs_all_hash_ref, $h2_msgs_all_hash_ref ) = @_ ; $debugcache and myprint( "Entering clean_cache\n" ) ; @@ -7404,7 +9605,8 @@ sub clean_cache { return( 1 ) ; } -sub tests_clean_cache { +sub tests_clean_cache +{ note( 'Entering tests_clean_cache()' ) ; ok( ( not -d 
'W/tmp/cache/G1/G2' or rmtree( 'W/tmp/cache/G1/G2' )), 'clean_cache: rmtree W/tmp/cache/G1/G2' ) ; @@ -7458,7 +9660,8 @@ sub tests_clean_cache { return ; } -sub tests_clean_cache_2 { +sub tests_clean_cache_2 +{ note( 'Entering tests_clean_cache_2()' ) ; ok( ( not -d 'W/tmp/cache/G1/G2' or rmtree( 'W/tmp/cache/G1/G2' )), 'clean_cache_2: rmtree W/tmp/cache/G1/G2' ) ; @@ -7518,19 +9721,20 @@ sub tests_clean_cache_2 { -sub tests_mkpath { +sub tests_mkpath +{ note( 'Entering tests_mkpath()' ) ; ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' )), 'mkpath: mkpath W/tmp/tests/' ) ; - + SKIP: { skip( 'Tests only for Unix', 10 ) if ( 'MSWin32' eq $OSNAME ) ; my $long_path_unix = '123456789/' x 30 ; - ok( ( -d "W/tmp/tests/long/$long_path_unix" or mkpath( "W/tmp/tests/long/$long_path_unix" ) ), 'mkpath: mkpath 300 char' ) ; + ok( ( -d "W/tmp/tests/long/$long_path_unix" or mkpath( "W/tmp/tests/long/$long_path_unix" ) ), 'mkpath: mkpath 300 char' ) ; ok( -d "W/tmp/tests/long/$long_path_unix", 'mkpath: mkpath > 300 char verified' ) ; ok( ( -d "W/tmp/tests/long/$long_path_unix" and rmtree( 'W/tmp/tests/long/' ) ), 'mkpath: rmtree 300 char' ) ; ok( ! -d "W/tmp/tests/long/$long_path_unix", 'mkpath: rmtree 300 char verified' ) ; - + ok( ( -d 'W/tmp/tests/trailing_dots...' or mkpath( 'W/tmp/tests/trailing_dots...' ) ), 'mkpath: mkpath trailing_dots...' ) ; ok( -d 'W/tmp/tests/trailing_dots...', 'mkpath: mkpath trailing_dots... verified' ) ; ok( ( -d 'W/tmp/tests/trailing_dots...' and rmtree( 'W/tmp/tests/trailing_dots...' ) ), 'mkpath: rmtree trailing_dots...' ) ; @@ -7559,7 +9763,7 @@ sub tests_mkpath { # Without the eval the following mkpath 300 just kill the whole process without a whisper #myprint( "$long_path_300\n" ) ; - eval { ok( ( -d $long_path_300 or mkpath( $long_path_300 ) ), 'mkpath: create a path with 300 characters' ) ; } + eval { ok( ( -d $long_path_300 or mkpath( $long_path_300 ) ), 'mkpath: create a path with 300 characters' ) ; } or ok( 1, 'mkpath: can not create a path with 300 characters' ) ; ok( ( ( ! -d $long_path_300 ) or -d $long_path_300 and rmtree( $long_path_300 ) ), 'mkpath: rmtree the 300 character path' ) ; ok( 1, 'mkpath: still alive' ) ; @@ -7568,8 +9772,8 @@ sub tests_mkpath { ok( -d 'W/tmp/tests/trailing_dots...', 'mkpath: mkpath trailing_dots... verified' ) ; ok( ( -d 'W/tmp/tests/trailing_dots...' and rmtree( 'W/tmp/tests/trailing_dots...' ) ), 'mkpath: rmtree trailing_dots...' ) ; ok( ! -d 'W/tmp/tests/trailing_dots...', 'mkpath: rmtree trailing_dots... 
verified' ) ; - - + + } ; note( 'Leaving tests_mkpath()' ) ; @@ -7577,7 +9781,8 @@ sub tests_mkpath { return 1 ; } -sub tests_touch { +sub tests_touch +{ note( 'Entering tests_touch()' ) ; ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' )), 'touch: mkpath W/tmp/tests/' ) ; @@ -7592,7 +9797,8 @@ sub tests_touch { } -sub touch { +sub touch +{ my @files = @_ ; my $failures = 0 ; @@ -7609,7 +9815,8 @@ sub touch { } -sub tests_tmpdir_has_colon_bug { +sub tests_tmpdir_has_colon_bug +{ note( 'Entering tests_tmpdir_has_colon_bug()' ) ; ok( 0 == tmpdir_has_colon_bug( q{} ), 'tmpdir_has_colon_bug: ' ) ; @@ -7621,7 +9828,8 @@ sub tests_tmpdir_has_colon_bug { return ; } -sub tmpdir_has_colon_bug { +sub tmpdir_has_colon_bug +{ my $path = shift ; my $path_filtered = filter_forbidden_characters( $path ) ; @@ -7632,7 +9840,8 @@ sub tmpdir_has_colon_bug { return( 0 ) ; } -sub tmpdir_fix_colon_bug { +sub tmpdir_fix_colon_bug +{ my $mysync = shift ; my $err = 0 ; if ( not (-d $mysync->{ tmpdir } and -r _ and -w _) ) { @@ -7679,10 +9888,10 @@ sub tmpdir_fix_colon_bug { } -sub tests_cache_folder { +sub tests_cache_folder +{ note( 'Entering tests_cache_folder()' ) ; - ok( '/path/fold1/fold2' eq cache_folder( q{}, '/path', 'fold1', 'fold2'), 'cache_folder: /path, fold1, fold2 -> /path/fold1/fold2' ) ; ok( '/pa_th/fold1/fold2' eq cache_folder( q{}, '/pa*th', 'fold1', 'fold2'), 'cache_folder: /pa*th, fold1, fold2 -> /path/fold1/fold2' ) ; ok( '/_p_a__th/fol_d1/fold2' eq cache_folder( q{}, '/>pp /path/fol_d1/fold2' ) ; @@ -7697,11 +9906,12 @@ sub tests_cache_folder { return ; } -sub cache_folder { +sub cache_folder +{ my( $cache_base, $cache_dir, $h1_fold, $h2_fold ) = @_ ; - my $sep_1 = $h1_sep || '/'; - my $sep_2 = $h2_sep || '/'; + my $sep_1 = $sync->{ h1_sep } || '/'; + my $sep_2 = $sync->{ h2_sep } || '/'; #myprint( "$cache_dir h1_fold $h1_fold sep1 $sep_1 h2_fold $h2_fold sep2 $sep_2\n" ) ; $h1_fold = convert_sep_to_slash( $h1_fold, $sep_1 ) ; @@ -7712,24 +9922,10 @@ sub cache_folder { return( $cache_folder ) ; } -sub filter_forbidden_characters { - my $string = shift ; - - if ( ! defined $string ) { return ; } - - if ( 'MSWin32' eq $OSNAME ) { - # Move trailing whitespace to _ " a b /c d " -> " a b_/c d_" - $string =~ s{\ (/|$)}{_$1}xg ; - } - $string =~ s{[\Q*|?:"<>\E\t\r\n\\]}{_}xg ; - #myprint( "[$string]\n" ) ; - return( $string ) ; -} - -sub tests_filter_forbidden_characters { +sub tests_filter_forbidden_characters +{ note( 'Entering tests_filter_forbidden_characters()' ) ; - ok( 'a_b' eq filter_forbidden_characters( 'a_b' ), 'filter_forbidden_characters: a_b -> a_b' ) ; ok( 'a_b' eq filter_forbidden_characters( 'a*b' ), 'filter_forbidden_characters: a*b -> a_b' ) ; ok( 'a_b' eq filter_forbidden_characters( 'a|b' ), 'filter_forbidden_characters: a|b -> a_b' ) ; @@ -7756,14 +9952,23 @@ sub tests_filter_forbidden_characters { return ; } -sub convert_sep_to_slash { - my ( $folder, $sep ) = @_ ; +sub filter_forbidden_characters +{ + my $string = shift ; - $folder =~ s{\Q$sep\E}{/}xg ; - return( $folder ) ; + if ( ! 
defined $string ) { return ; } + + if ( 'MSWin32' eq $OSNAME ) { + # Move trailing whitespace to _ " a b /c d " -> " a b_/c d_" + $string =~ s{\ (/|$)}{_$1}xg ; + } + $string =~ s{[\Q*|?:"<>\E\t\r\n\\]}{_}xg ; + #myprint( "[$string]\n" ) ; + return( $string ) ; } -sub tests_convert_sep_to_slash { +sub tests_convert_sep_to_slash +{ note( 'Entering tests_convert_sep_to_slash()' ) ; @@ -7779,8 +9984,17 @@ sub tests_convert_sep_to_slash { return ; } +sub convert_sep_to_slash +{ + my ( $folder, $sep ) = @_ ; -sub tests_regexmess { + $folder =~ s{\Q$sep\E}{/}xg ; + return( $folder ) ; +} + + +sub tests_regexmess +{ note( 'Entering tests_regexmess()' ) ; ok( 'blabla' eq regexmess( 'blabla' ), 'regexmess, no regexmess, nothing to do' ) ; @@ -7826,7 +10040,7 @@ sub tests_regexmess { eq regexmess("\n" . 'From '), 'From mbox 3 remove'); - #myprint( "[", regexmess("From zzz\n" . 'From '), "]" ) ; + #myprint( "[", regexmess("From zzz\n" . 'From '), "]" ) ; ok( q{} . 'From ' eq regexmess("From zzz\n" . 'From '), 'From mbox 4 remove'); @@ -8266,23 +10480,25 @@ EOM } -sub regexmess { +sub regexmess +{ my ( $string ) = @_ ; foreach my $regexmess ( @regexmess ) { - $debug and myprint( "eval \$string =~ $regexmess\n" ) ; + $sync->{ debug } and myprint( "eval \$string =~ $regexmess\n" ) ; my $ret = eval "\$string =~ $regexmess ; 1" ; - #myprint( "eval [$ret]\n" ) ; + #myprint( "eval [$ret]\n" ) ; if ( ( not $ret ) or $EVAL_ERROR ) { - myprint( "Error: eval regexmess '$regexmess': $EVAL_ERROR" ) ; + myprint( "Error: eval regexmess '$regexmess': $EVAL_ERROR" ) ; return( undef ) ; } } - $debug and myprint( "$string\n" ) ; + $sync->{ debug } and myprint( "$string\n" ) ; return( $string ) ; } -sub tests_skipmess { +sub tests_skipmess +{ note( 'Entering tests_skipmess()' ) ; ok( not( defined skipmess( 'blabla' ) ), 'skipmess, no skipmess, no skip' ) ; @@ -8503,17 +10719,18 @@ EOM return ; } -sub skipmess { +sub skipmess +{ my ( $string ) = @_ ; my $match ; - #myprint( "$string\n" ) ; + #myprint( "$string\n" ) ; foreach my $skipmess ( @skipmess ) { - $debug and myprint( "eval \$match = \$string =~ $skipmess\n" ) ; - my $ret = eval "\$match = \$string =~ $skipmess ; 1" ; - #myprint( "eval [$ret]\n" ) ; - $debug and myprint( "match [$match]\n" ) ; + $sync->{ debug } and myprint( "eval \$match = \$string =~ $skipmess\n" ) ; + my $ret = eval "\$match = \$string =~ $skipmess ; 1" ; + #myprint( "eval [$ret]\n" ) ; + $sync->{ debug } and myprint( "match [$match]\n" ) ; if ( ( not $ret ) or $EVAL_ERROR ) { - myprint( "Error: eval skipmess '$skipmess': $EVAL_ERROR" ) ; + myprint( "Error: eval skipmess '$skipmess': $EVAL_ERROR" ) ; return( undef ) ; } return( $match ) if ( $match ) ; @@ -8524,7 +10741,8 @@ sub skipmess { -sub tests_bytes_display_string { +sub tests_bytes_display_string +{ note( 'Entering tests_bytes_display_string()' ) ; @@ -8552,13 +10770,14 @@ sub tests_bytes_display_string { ok( '1048576.000 PiB' eq bytes_display_string( 1_180_591_620_717_411_303_424 ), 'bytes_display_string: 1_180_591_620_717_411_303_424' ) ; - #myprint( bytes_display_string( 1_180_591_620_717_411_303_424 ), "\n" ) ; + #myprint( bytes_display_string( 1_180_591_620_717_411_303_424 ), "\n" ) ; note( 'Leaving tests_bytes_display_string()' ) ; return ; } -sub bytes_display_string { +sub bytes_display_string +{ my ( $bytes ) = @_ ; my $readable_value = q{} ; @@ -8594,11 +10813,48 @@ sub bytes_display_string { } # if you have exabytes (EiB) of email to transfer, you have too much email! 
} - #myprint( "$bytes = $readable_value\n" ) ; + #myprint( "$bytes = $readable_value\n" ) ; return( $readable_value ) ; } -sub stats { + +sub tests_useheader_suggestion +{ + note( 'Entering tests_useheader_suggestion()' ) ; + + is( undef, useheader_suggestion( ), 'useheader_suggestion: no args => undef' ) ; + my $mysync = {} ; + + $mysync->{ h1_nb_msg_noheader } = 0 ; + is( q{}, useheader_suggestion( $mysync ), 'useheader_suggestion: h1_nb_msg_noheader count null => no suggestion' ) ; + $mysync->{ h1_nb_msg_noheader } = 2 ; + is( q{in order to sync those 2 unidentified messages, add option --addheader}, useheader_suggestion( $mysync ), + 'useheader_suggestion: h1_nb_msg_noheader count 2 => suggestion of --addheader' ) ; + + note( 'Leaving tests_useheader_suggestion()' ) ; + return ; +} + +sub useheader_suggestion +{ + my $mysync = shift ; + if ( ! defined $mysync->{ h1_nb_msg_noheader } ) + { + return ; + } + elsif ( 1 <= $mysync->{ h1_nb_msg_noheader } ) + { + return qq{in order to sync those $mysync->{ h1_nb_msg_noheader } unidentified messages, add option --addheader} ; + } + else + { + return q{} ; + } + return ; +} + +sub stats +{ my $mysync = shift ; if ( ! $mysync->{stats} ) { @@ -8615,74 +10871,76 @@ sub stats { my $memory_ratio = ($max_msg_size_in_bytes) ? mysprintf('%.1f', $memory_consumption_at_end / $max_msg_size_in_bytes) : 'NA' ; - - myprint( "++++ Statistics\n" ) ; - myprint( "Transfer started on : $timestart_str\n" ) ; - myprint( "Transfer ended on : $timeend_str\n" ) ; - myprintf( "Transfer time : %.1f sec\n", $timediff ) ; - myprint( "Folders synced : $h1_folders_wanted_ct/$h1_folders_wanted_nb synced\n" ) ; - myprint( "Messages transferred : $mysync->{nb_msg_transferred} " ) ; + # my $useheader_suggestion = useheader_suggestion( $mysync ) ; + myprint( "++++ Statistics\n" ) ; + myprint( "Transfer started on : $timestart_str\n" ) ; + myprint( "Transfer ended on : $timeend_str\n" ) ; + myprintf( "Transfer time : %.1f sec\n", $timediff ) ; + myprint( "Folders synced : $h1_folders_wanted_ct/$h1_folders_wanted_nb synced\n" ) ; + myprint( "Messages transferred : $mysync->{nb_msg_transferred} " ) ; myprint( "(could be $nb_msg_skipped_dry_mode without dry mode)" ) if ( $mysync->{dry} ) ; myprint( "\n" ) ; - myprint( "Messages skipped : $nb_msg_skipped\n" ) ; - myprint( "Messages found duplicate on host1 : $h1_nb_msg_duplicate\n" ) ; - myprint( "Messages found duplicate on host2 : $h2_nb_msg_duplicate\n" ) ; - myprint( "Messages void (noheader) on host1 : $h1_nb_msg_noheader\n" ) ; - myprint( "Messages void (noheader) on host2 : $h2_nb_msg_noheader\n" ) ; - myprint( "Messages deleted on host1 : $h1_nb_msg_deleted\n" ) ; - myprint( "Messages deleted on host2 : $h2_nb_msg_deleted\n" ) ; - myprintf( "Total bytes transferred : %s (%s)\n", + myprint( "Messages skipped : $mysync->{ nb_msg_skipped }\n" ) ; + myprint( "Messages found duplicate on host1 : $h1_nb_msg_duplicate\n" ) ; + myprint( "Messages found duplicate on host2 : $h2_nb_msg_duplicate\n" ) ; + myprint( "Messages found crossduplicate on host2 : $mysync->{ h2_nb_msg_crossdup }\n" ) ; + myprint( "Messages void (noheader) on host1 : $mysync->{ h1_nb_msg_noheader } ", useheader_suggestion( $mysync ), "\n" ) ; + myprint( "Messages void (noheader) on host2 : $h2_nb_msg_noheader\n" ) ; + nb_messages_in_1_not_in_2( $mysync ) ; + nb_messages_in_2_not_in_1( $mysync ) ; + myprintf( "Messages found in host1 not in host2 : %s messages\n", $mysync->{ nb_messages_in_1_not_in_2 } ) ; + myprintf( "Messages found in host2 not in host1 : %s 
messages\n", $mysync->{ nb_messages_in_2_not_in_1 } ) ; + myprint( "Messages deleted on host1 : $mysync->{ h1_nb_msg_deleted }\n" ) ; + myprint( "Messages deleted on host2 : $h2_nb_msg_deleted\n" ) ; + myprintf( "Total bytes transferred : %s (%s)\n", $mysync->{total_bytes_transferred}, bytes_display_string( $mysync->{total_bytes_transferred} ) ) ; - myprintf( "Total bytes duplicate host1 : %s (%s)\n", - $h1_total_bytes_duplicate, - bytes_display_string( $h1_total_bytes_duplicate) ) ; - myprintf( "Total bytes duplicate host2 : %s (%s)\n", - $h2_total_bytes_duplicate, - bytes_display_string( $h2_total_bytes_duplicate) ) ; - myprintf( "Total bytes skipped : %s (%s)\n", - $total_bytes_skipped, - bytes_display_string( $total_bytes_skipped ) ) ; - myprintf( "Total bytes error : %s (%s)\n", - $total_bytes_error, - bytes_display_string( $total_bytes_error ) ) ; + myprintf( "Total bytes skipped : %s (%s)\n", + $mysync->{ total_bytes_skipped }, + bytes_display_string( $mysync->{ total_bytes_skipped } ) ) ; $timediff ||= 1 ; # No division per 0 - myprintf("Message rate : %.1f messages/s\n", $mysync->{nb_msg_transferred} / $timediff ) ; - myprintf("Average bandwidth rate : %.1f KiB/s\n", $mysync->{total_bytes_transferred} / $KIBI / $timediff ) ; - myprint( "Reconnections to host1 : $mysync->{imap1}->{IMAPSYNC_RECONNECT_COUNT}\n" ) ; - myprint( "Reconnections to host2 : $mysync->{imap2}->{IMAPSYNC_RECONNECT_COUNT}\n" ) ; - myprintf("Memory consumption at the end : %.1f MiB (started with %.1f MiB)\n", + myprintf("Message rate : %.1f messages/s\n", $mysync->{nb_msg_transferred} / $timediff ) ; + myprintf("Average bandwidth rate : %.1f KiB/s\n", $mysync->{total_bytes_transferred} / $KIBI / $timediff ) ; + myprint( "Reconnections to host1 : $mysync->{imap1}->{IMAPSYNC_RECONNECT_COUNT}\n" ) ; + myprint( "Reconnections to host2 : $mysync->{imap2}->{IMAPSYNC_RECONNECT_COUNT}\n" ) ; + myprintf("Memory consumption at the end : %.1f MiB (started with %.1f MiB)\n", $memory_consumption_at_end / $KIBI / $KIBI, $memory_consumption_at_start / $KIBI / $KIBI ) ; - myprintf("Biggest message : %s bytes (%s)\n", + myprint( "Load end is : " . 
( join( q{ }, loadavg( ) ) || 'unknown' ), " on $mysync->{cpu_number} cores\n" ) ; + + myprintf("Biggest message : %s bytes (%s)\n", $max_msg_size_in_bytes, bytes_display_string( $max_msg_size_in_bytes) ) ; - myprint( "Memory/biggest message ratio : $memory_ratio\n" ) ; + myprint( "Memory/biggest message ratio : $memory_ratio\n" ) ; if ( $foldersizesatend and $foldersizes ) { my $nb_msg_start_diff = diff_or_NA( $h2_nb_msg_start, $h1_nb_msg_start ) ; my $bytes_start_diff = diff_or_NA( $h2_bytes_start, $h1_bytes_start ) ; - myprintf("Start difference host2 - host1 : %s messages, %s bytes (%s)\n", $nb_msg_start_diff, + myprintf("Start difference host2 - host1 : %s messages, %s bytes (%s)\n", $nb_msg_start_diff, $bytes_start_diff, bytes_display_string( $bytes_start_diff ) ) ; my $nb_msg_end_diff = diff_or_NA( $h2_nb_msg_end, $h1_nb_msg_end ) ; my $bytes_end_diff = diff_or_NA( $h2_bytes_end, $h1_bytes_end ) ; - myprintf("Final difference host2 - host1 : %s messages, %s bytes (%s)\n", $nb_msg_end_diff, + myprintf("Final difference host2 - host1 : %s messages, %s bytes (%s)\n", $nb_msg_end_diff, $bytes_end_diff, bytes_display_string( $bytes_end_diff ) ) ; } - myprint( "Detected $mysync->{nb_errors} errors\n\n" ) ; - myprint( $warn_release, "\n" ) ; - myprint( homepage( ), "\n" ) ; + comment_on_final_diff_in_1_not_in_2( $mysync ) ; + comment_on_final_diff_in_2_not_in_1( $mysync ) ; + myprint( "Detected $mysync->{nb_errors} errors\n\n" ) ; + + myprint( $warn_release, "\n" ) ; + myprint( homepage( ), "\n" ) ; return ; } -sub diff_or_NA { +sub diff_or_NA +{ my( $n1, $n2 ) = @ARG ; if ( not defined $n1 or not defined $n2 ) { @@ -8697,7 +10955,8 @@ sub diff_or_NA { return( $n1 - $n2 ) ; } -sub match_number { +sub match_number +{ my $n = shift @ARG ; if ( not defined $n ) { @@ -8712,7 +10971,8 @@ sub match_number { } -sub tests_match_number { +sub tests_match_number +{ note( 'Entering tests_match_number()' ) ; @@ -8730,7 +10990,8 @@ sub tests_match_number { -sub tests_diff_or_NA { +sub tests_diff_or_NA +{ note( 'Entering tests_diff_or_NA()' ) ; @@ -8752,12 +11013,14 @@ sub tests_diff_or_NA { return ; } -sub homepage { +sub homepage +{ return( 'Homepage: https://imapsync.lamiral.info/' ) ; } -sub load_modules { +sub load_modules +{ if ( $sync->{ssl1} or $sync->{ssl2} or $sync->{tls1} @@ -8774,15 +11037,16 @@ sub load_modules { -sub parse_header_msg { +sub parse_header_msg +{ my ( $mysync, $imap, $m_uid, $s_heads, $s_fir, $side, $s_hash ) = @_ ; my $head = $s_heads->{$m_uid} ; my $headnum = scalar keys %{ $head } ; - $debug and myprint( "$side uid $m_uid head nb pass one: ", $headnum, "\n" ) ; + $mysync->{ debug } and myprint( "$side: uid $m_uid number of headers, pass one: ", $headnum, "\n" ) ; if ( ( ! $headnum ) and ( $wholeheaderifneeded ) ){ - myprint( "$side uid $m_uid no header by parse_headers so taking whole header with BODY.PEEK[HEADER]\n" ) ; + myprint( "$side: uid $m_uid no header by parse_headers so taking whole header with BODY.PEEK[HEADER]\n" ) ; $imap->fetch($m_uid, 'BODY.PEEK[HEADER]' ) ; my $whole_header = $imap->_transaction_literals ; @@ -8790,7 +11054,7 @@ sub parse_header_msg { $head = decompose_header( $whole_header ) ; $headnum = scalar keys %{ $head } ; - $debug and myprint( "$side uid $m_uid head nb pass two: ", $headnum, "\n" ) ; + $mysync->{ debug } and myprint( "$side: uid $m_uid number of headers, pass two: ", $headnum, "\n" ) ; } #myprint( Data::Dumper->Dump( [ $head, \%useheader ] ) ) ; @@ -8801,7 +11065,7 @@ sub parse_header_msg { if ( ( ! 
$headstr ) and ( $mysync->{addheader} ) and ( $side eq 'Host1' ) ) { my $header = add_header( $m_uid ) ; - myprint( "Host1 uid $m_uid no header found so adding our own [$header]\n" ) ; + myprint( "$side: uid $m_uid no header found so adding our own [$header]\n" ) ; $headstr .= uc $header ; $s_fir->{$m_uid}->{NO_HEADER} = 1; } @@ -8813,7 +11077,7 @@ sub parse_header_msg { my $idate = $s_fir->{$m_uid}->{'INTERNALDATE'} ; $size = length $headstr unless ( $size ) ; my $m_md5 = md5_base64( $headstr ) ; - $debug and myprint( "$side uid $m_uid sig $m_md5 size $size idate $idate\n" ) ; + $mysync->{ debug } and myprint( "$side: uid $m_uid sig $m_md5 size $size idate $idate\n" ) ; my $key ; if ($skipsize) { $key = "$m_md5"; @@ -8832,7 +11096,8 @@ sub parse_header_msg { return( 1 ) ; } -sub header_construct { +sub header_construct +{ my( $head, $side, $m_uid ) = @_ ; @@ -8846,10 +11111,10 @@ sub header_construct { my $H = header_line_normalize( $h, $val ) ; # show stuff in debug mode - $debug and myprint( "$side uid $m_uid header [$H]", "\n" ) ; + $sync->{ debug } and myprint( "$side uid $m_uid header [$H]", "\n" ) ; if ($skipheader and $H =~ m/$skipheader/xi) { - $debug and myprint( "$side uid $m_uid skipping header [$H]\n" ) ; + $sync->{ debug } and myprint( "$side uid $m_uid skipping header [$H]\n" ) ; next ; } $headstr .= "$H" ; @@ -8859,7 +11124,8 @@ sub header_construct { } -sub header_line_normalize { +sub header_line_normalize +{ my( $header_key, $header_val ) = @_ ; # no 8-bit data in headers ! @@ -8888,7 +11154,8 @@ sub header_line_normalize { return( $header_line ) ; } -sub tests_header_line_normalize { +sub tests_header_line_normalize +{ note( 'Entering tests_header_line_normalize()' ) ; @@ -8905,52 +11172,130 @@ sub tests_header_line_normalize { } -sub firstline { +sub tests_firstline +{ + note( 'Entering tests_firstline()' ) ; + + is( q{}, firstline( 'W/tmp/tests/noexist.txt' ), 'firstline: getting empty string from inexisting W/tmp/tests/noexist.txt' ) ; + + ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'firstline: mkpath W/tmp/tests/' ) ; + + is( "blabla\n" , string_to_file( "blabla\n", 'W/tmp/tests/firstline.txt' ), 'firstline: put blabla in W/tmp/tests/firstline.txt' ) ; + is( 'blabla' , firstline( 'W/tmp/tests/firstline.txt' ), 'firstline: get blabla from W/tmp/tests/firstline.txt' ) ; + + is( q{} , string_to_file( q{}, 'W/tmp/tests/firstline2.txt' ), 'firstline: put empty string in W/tmp/tests/firstline2.txt' ) ; + is( q{} , firstline( 'W/tmp/tests/firstline2.txt' ), 'firstline: get empty string from W/tmp/tests/firstline2.txt' ) ; + + is( "\n" , string_to_file( "\n", 'W/tmp/tests/firstline3.txt' ), 'firstline: put CR in W/tmp/tests/firstline3.txt' ) ; + is( q{} , firstline( 'W/tmp/tests/firstline3.txt' ), 'firstline: get empty string from W/tmp/tests/firstline3.txt' ) ; + + is( "blabla\nTiti\n" , string_to_file( "blabla\nTiti\n", 'W/tmp/tests/firstline4.txt' ), 'firstline: put blabla\nTiti\n in W/tmp/tests/firstline4.txt' ) ; + is( 'blabla' , firstline( 'W/tmp/tests/firstline4.txt' ), 'firstline: get blabla from W/tmp/tests/firstline4.txt' ) ; + + note( 'Leaving tests_firstline()' ) ; + return ; +} + +sub firstline +{ # extract the first line of a file (without \n) + # return empty string if error or empty string - my( $file ) = @_ ; - my $line = q{} ; + my $file = shift ; + my $line ; - if ( ! 
-e $file ) { - myprint( "Cannot open file $file since it does not exist\n" ) ; - return ; - } - - open my $FILE, '<', $file or do { - myprint( "Error opening file $file : $OS_ERROR\n" ) ; - return ; - } ; - $line = <$FILE> || q{} ; - close $FILE ; - chomp $line ; + $line = nthline( $file, 1 ) ; return $line ; } -sub tests_firstline { - note( 'Entering tests_firstline()' ) ; - is( undef , firstline( 'W/tmp/tests/noexist.txt' ), 'tests_firstline: not getting blabla from W/tmp/tests/noexist.txt' ) ; - is( "blabla\n" , string_to_file( "blabla\n", 'W/tmp/tests/firstline.txt' ), 'tests_firstline: put blabla in W/tmp/tests/firstline.txt' ) ; - is( 'blabla' , firstline( 'W/tmp/tests/firstline.txt' ), 'tests_firstline: get blabla from W/tmp/tests/firstline.txt' ) ; - is( q{} , string_to_file( q{}, 'W/tmp/tests/firstline2.txt' ), 'tests_firstline: put empty string in W/tmp/tests/firstline2.txt' ) ; - is( q{} , firstline( 'W/tmp/tests/firstline2.txt' ), 'tests_firstline: get empty string from W/tmp/tests/firstline2.txt' ) ; - is( "\n" , string_to_file( "\n", 'W/tmp/tests/firstline3.txt' ), 'tests_firstline: put CR in W/tmp/tests/firstline3.txt' ) ; - is( q{} , firstline( 'W/tmp/tests/firstline3.txt' ), 'tests_firstline: get empty string from W/tmp/tests/firstline3.txt' ) ; - note( 'Leaving tests_firstline()' ) ; +sub tests_secondline +{ + note( 'Entering tests_secondline()' ) ; + + is( q{}, secondline( 'W/tmp/tests/noexist.txt' ), 'secondline: getting empty string from inexisting W/tmp/tests/noexist.txt' ) ; + is( q{}, secondline( 'W/tmp/tests/noexist.txt', 2 ), 'secondline: 2nd getting empty string from inexisting W/tmp/tests/noexist.txt' ) ; + + ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'secondline: mkpath W/tmp/tests/' ) ; + + is( "L1\nL2\nL3\nL4\n" , string_to_file( "L1\nL2\nL3\nL4\n", 'W/tmp/tests/secondline.txt' ), 'secondline: put L1\nL2\nL3\nL4\n in W/tmp/tests/secondline.txt' ) ; + is( 'L2' , secondline( 'W/tmp/tests/secondline.txt' ), 'secondline: get L2 from W/tmp/tests/secondline.txt' ) ; + + + note( 'Leaving tests_secondline()' ) ; return ; } +sub secondline +{ + # extract the second line of a file (without \n) + # return empty string if error or empty string + + my $file = shift ; + my $line ; + + $line = nthline( $file, 2 ) ; + return $line ; +} + + + + +sub tests_nthline +{ + note( 'Entering tests_nthline()' ) ; + + is( q{}, nthline( 'W/tmp/tests/noexist.txt' ), 'nthline: getting empty string from inexisting W/tmp/tests/noexist.txt' ) ; + is( q{}, nthline( 'W/tmp/tests/noexist.txt', 2 ), 'nthline: 2nd getting empty string from inexisting W/tmp/tests/noexist.txt' ) ; + + ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'nthline: mkpath W/tmp/tests/' ) ; + + is( "L1\nL2\nL3\nL4\n" , string_to_file( "L1\nL2\nL3\nL4\n", 'W/tmp/tests/nthline.txt' ), 'nthline: put L1\nL2\nL3\nL4\n in W/tmp/tests/nthline.txt' ) ; + is( 'L3' , nthline( 'W/tmp/tests/nthline.txt', 3 ), 'nthline: get L3 from W/tmp/tests/nthline.txt' ) ; + + + note( 'Leaving tests_nthline()' ) ; + return ; +} + + +sub nthline +{ + # extract the nth line of a file (without \n) + # return empty string if error or empty string + + my $file = shift ; + my $num = shift ; + + if ( ! all_defined( $file, $num ) ) { return q{} ; } + + my $line ; + + $line = ( file_to_array( $file ) )[$num - 1] ; + if ( ! 
defined $line ) + { + return q{} ; + } + else + { + chomp $line ; + return $line ; + } + +} + # Should be unit tested and then be used by file_to_string, refactoring file_to_string -sub file_to_array { +sub file_to_array +{ my( $file ) = shift ; my @string ; open my $FILE, '<', $file or do { - myprint( "Error reading file $file : $OS_ERROR" ) ; + myprint( "Error reading file $file : $OS_ERROR\n" ) ; return ; } ; @string = <$FILE> ; @@ -8959,7 +11304,8 @@ sub file_to_array { } -sub tests_file_to_string { +sub tests_file_to_string +{ note( 'Entering tests_file_to_string()' ) ; is( undef, file_to_string( ), 'file_to_string: no args => undef' ) ; @@ -8967,7 +11313,7 @@ sub tests_file_to_string { is( undef, file_to_string( '/' ), 'file_to_string: reading a directory => undef' ) ; ok( file_to_string( $PROGRAM_NAME ), 'file_to_string: reading myself' ) ; - ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'file_to_string: mkpath W/tmp/tests/' ) ; + ok( (-d 'W/tmp/tests/' or mkpath( 'W/tmp/tests/' ) ), 'file_to_string: mkpath W/tmp/tests/' ) ; is( 'lilili', string_to_file( 'lilili', 'W/tmp/tests/canbewritten' ), 'file_to_string: string_to_file filling W/tmp/tests/canbewritten with lilili' ) ; is( 'lilili', file_to_string( 'W/tmp/tests/canbewritten' ), 'file_to_string: reading W/tmp/tests/canbewritten is lilili' ) ; @@ -8979,7 +11325,8 @@ sub tests_file_to_string { return ; } -sub file_to_string { +sub file_to_string +{ my $file = shift ; if ( ! $file ) { return ; } if ( ! -e $file ) { return ; } @@ -8997,7 +11344,8 @@ sub file_to_string { } -sub tests_string_to_file { +sub tests_string_to_file +{ note( 'Entering tests_string_to_file()' ) ; is( undef, string_to_file( ), 'string_to_file: no args => undef' ) ; @@ -9017,7 +11365,8 @@ sub tests_string_to_file { return ; } -sub string_to_file { +sub string_to_file +{ my( $string, $file ) = @_ ; if( ! defined $string ) { return ; } if( ! defined $file ) { return ; } @@ -9036,7 +11385,7 @@ sub string_to_file { return $string ; } -q^ +0 and <<'MULTILINE_COMMENT' ; This is a multiline comment. Based on David Carter discussion, to do: * Call parameters stay the same. @@ -9050,9 +11399,11 @@ OK * in case of CHILD_ERROR, return( undef, $error ) * in case of good command and final $string empty, consider it like CHILD_ERROR => return( undef, $error ) and print $error, with folder/UID/maybeSubject context, on console and at the end with the final error listing. Count this as a sync error. -^ if 0 ; # End of multiline comment. +MULTILINE_COMMENT +# End of multiline comment. 
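# Editor's aside, not part of the patch: the contract of the new firstline(),
# secondline() and nthline() helpers introduced above, shown as a standalone
# sketch. Behaviour mirrors tests_nthline(): a missing or unreadable file, or a
# line number past the end of the file, yields an empty string rather than
# undef. The name nth_line_of() is hypothetical.
use strict ;
use warnings ;

sub nth_line_of
{
    my ( $file, $num ) = @_ ;
    return q{} if not defined $file ;
    return q{} if not defined $num ;
    open my $FH, '<', $file or return q{} ;   # no such file => empty string
    my @lines = <$FH> ;
    close $FH ;
    my $line = $lines[ $num - 1 ] ;
    return q{} if not defined $line ;         # fewer than $num lines => empty string
    chomp $line ;
    return $line ;
}

# With a file containing "L1\nL2\nL3\nL4\n", as in tests_nthline() above,
# nth_line_of( $file, 1 ) is 'L1' and nth_line_of( $file, 3 ) is 'L3'.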
-sub pipemess { +sub pipemess +{ my ( $string, @commands ) = @_ ; my $error = q{} ; foreach my $command ( @commands ) { @@ -9085,7 +11436,7 @@ sub pipemess { myprint( qq{STDERR of --pipemess "$command": $error_cmd\n} ) ; } } - #myprint( "[$string]\n" ) ; + #myprint( "[$string]\n" ) ; if ( wantarray ) { return ( $string, $error ) ; }else{ @@ -9095,7 +11446,8 @@ sub pipemess { -sub tests_pipemess { +sub tests_pipemess +{ note( 'Entering tests_pipemess()' ) ; @@ -9148,7 +11500,7 @@ sub tests_pipemess { ( $stringT, $errorT ) = pipemess( 'dontcare', 'true' ) ; is( $stringT, undef, 'pipemess: list context, true but no output, string' ) ; - like( $errorT, qr{\QFailure: --pipemess command "true" ended with "0" characters exit value "0" and STDERR ""\E}xm, 'pipemess: list context, true but no output, error' ) ; + like( $errorT, qr{\QFailure: --pipemess command "true" ended with "0" characters exit value "0" and STDERR ""\E}xm, 'pipemess: list context, true but no output, error' ) ; ( $stringT, $errorT ) = pipemess( 'dontcare', 'false' ) ; is( $stringT, undef, 'pipemess: list context, false and no output, string' ) ; @@ -9162,12 +11514,12 @@ sub tests_pipemess { ( $stringT, $errorT ) = pipemess( 'dontcare', '( echo -n blablabla 3>&1 1>&2 2>&3 )' ) ; is( $stringT, undef, 'pipemess: list context, "no output STDERR blablabla", string' ) ; - like( $errorT, qr{blablabla"}xm, 'pipemess: list context, "no output STDERR blablabla", error' ) ; + like( $errorT, qr{blablabla"}xm, 'pipemess: list context, "no output STDERR blablabla", error' ) ; ( $stringT, $errorT ) = pipemess( 'dontcare', '( echo -n blablabla 3>&1 1>&2 2>&3 )', 'false' ) ; is( $stringT, undef, 'pipemess: list context, "no output STDERR blablabla then false", string' ) ; - like( $errorT, qr{blablabla"}xm, 'pipemess: list context, "no output STDERR blablabla then false", error' ) ; + like( $errorT, qr{blablabla"}xm, 'pipemess: list context, "no output STDERR blablabla then false", error' ) ; ( $stringT, $errorT ) = pipemess( 'dontcare', 'false', '( echo -n blablabla 3>&1 1>&2 2>&3 )' ) ; is( $stringT, undef, 'pipemess: list context, "false then STDERR blablabla", string' ) ; @@ -9191,9 +11543,10 @@ sub tests_pipemess { -sub tests_is_a_release_number { +sub tests_is_a_release_number +{ note( 'Entering tests_is_a_release_number()' ) ; - + is( undef, is_a_release_number( ), 'is_a_release_number: no args => undef' ) ; ok( is_a_release_number( $RELEASE_NUMBER_EXAMPLE_1 ), 'is_a_release_number 1.351' ) ; ok( is_a_release_number( $RELEASE_NUMBER_EXAMPLE_2 ), 'is_a_release_number 42.4242' ) ; @@ -9204,7 +11557,8 @@ sub tests_is_a_release_number { return ; } -sub is_a_release_number { +sub is_a_release_number +{ my $number = shift ; if ( ! 
defined $number ) { return ; } return( $number =~ m{^\d+\.\d+$}xo ) ; @@ -9212,7 +11566,8 @@ sub is_a_release_number { -sub imapsync_version_public { +sub imapsync_version_public +{ my $local_version = imapsync_version( $sync ) ; my $imapsync_basename = imapsync_basename( ) ; @@ -9237,7 +11592,8 @@ sub imapsync_version_public { return( $last_release ) ; } -sub not_long_imapsync_version_public { +sub not_long_imapsync_version_public +{ #myprint( "Entering not_long_imapsync_version_public\n" ) ; my $fake = shift ; @@ -9254,7 +11610,7 @@ sub not_long_imapsync_version_public { POSIX::sigaction(SIGALRM, POSIX::SigAction->new(sub { croak 'alarm' } ) ) - or myprint( "Error setting SIGALRM handler: $OS_ERROR\n" ) ; + or myprint( "Error setting SIGALRM handler: $OS_ERROR\n" ) ; } my $ret = eval { @@ -9262,12 +11618,12 @@ sub not_long_imapsync_version_public { { $val = imapsync_version_public( ) ; #sleep 4 ; - #myprint( "End of imapsync_version_public\n" ) ; + #myprint( "End of imapsync_version_public\n" ) ; } alarm 0 ; 1 ; } ; - #myprint( "eval [$ret]\n" ) ; + #myprint( "eval [$ret]\n" ) ; if ( ( not $ret ) or $EVAL_ERROR ) { #myprint( "$EVAL_ERROR" ) ; if ($EVAL_ERROR =~ /alarm/) { @@ -9283,7 +11639,8 @@ sub not_long_imapsync_version_public { } } -sub tests_not_long_imapsync_version_public { +sub tests_not_long_imapsync_version_public +{ note( 'Entering tests_not_long_imapsync_version_public()' ) ; @@ -9294,20 +11651,21 @@ sub tests_not_long_imapsync_version_public { return ; } -sub check_last_release { +sub check_last_release +{ my $fake = shift ; my $public_release = not_long_imapsync_version_public( $fake ) ; - $debug and myprint( "check_last_release: [$public_release]\n" ) ; + $sync->{ debug } and myprint( "check_last_release: [$public_release]\n" ) ; my $inline_help_when_on = '( Use --noreleasecheck to avoid this release check. )' ; - + if ( $public_release eq 'unknown' ) { return( 'Imapsync public release is unknown.' . $inline_help_when_on ) ; } - + if ( $public_release eq 'timeout' ) { return( 'Imapsync public release is unknown (timeout).' . $inline_help_when_on ) ; } - + if ( ! is_a_release_number( $public_release ) ) { return( "Imapsync public release is unknown ($public_release)." . $inline_help_when_on ) ; } @@ -9323,7 +11681,8 @@ sub check_last_release { return( 'really unknown' ) ; # Should never arrive here } -sub tests_check_last_release { +sub tests_check_last_release +{ note( 'Entering tests_check_last_release()' ) ; diag( check_last_release( 1.1 ) ) ; @@ -9342,7 +11701,8 @@ sub tests_check_last_release { return ; } -sub imapsync_version { +sub imapsync_version +{ my $mysync = shift ; my $rcs = $mysync->{rcs} ; my $version ; @@ -9352,7 +11712,8 @@ sub imapsync_version { } -sub tests_version_from_rcs { +sub tests_version_from_rcs +{ note( 'Entering tests_version_from_rcs()' ) ; is( undef, version_from_rcs( ), 'version_from_rcs: no args => UNKNOWN' ) ; @@ -9364,22 +11725,24 @@ sub tests_version_from_rcs { } -sub version_from_rcs { +sub version_from_rcs +{ my $rcs = shift ; if ( ! 
$rcs ) { return ; } - + my $version = 'UNKNOWN' ; if ( $rcs =~ m{,v\s+(\d+\.\d+)}mxso ) { $version = $1 } - + return( $version ) ; } -sub tests_imapsync_basename { +sub tests_imapsync_basename +{ note( 'Entering tests_imapsync_basename()' ) ; ok( imapsync_basename() =~ m/imapsync/, 'imapsync_basename: match imapsync'); @@ -9389,14 +11752,16 @@ sub tests_imapsync_basename { return ; } -sub imapsync_basename { +sub imapsync_basename +{ return basename( $PROGRAM_NAME ) ; } -sub localhost_info { +sub localhost_info +{ my $mysync = shift ; my( $infos ) = join( q{}, "Here is imapsync ", imapsync_version( $mysync ), @@ -9411,7 +11776,8 @@ sub localhost_info { return( $infos ) ; } -sub tests_cpu_number { +sub tests_cpu_number +{ note( 'Entering tests_cpu_number()' ) ; is( 1, is_an_integer( cpu_number( ) ), "cpu_number: is_an_integer" ) ; @@ -9424,7 +11790,8 @@ sub tests_cpu_number { return ; } -sub cpu_number { +sub cpu_number +{ my $cpu_number_forced = shift ; # Well, here 1 is better than 0 or undef @@ -9434,19 +11801,19 @@ sub cpu_number { if ( $ENV{"NUMBER_OF_PROCESSORS"} ) { # might be under a Windows system $cpu_number = $ENV{"NUMBER_OF_PROCESSORS"} ; - $debug and myprint( "Number of processors found by env var NUMBER_OF_PROCESSORS: $cpu_number\n" ) ; - }elsif ( 'darwin' eq $OSNAME ) { + $sync->{ debug } and myprint( "Number of processors found by env var NUMBER_OF_PROCESSORS: $cpu_number\n" ) ; + }elsif ( 'darwin' eq $OSNAME or 'freebsd' eq $OSNAME ) { $cpu_number = backtick( "sysctl -n hw.ncpu" ) ; chomp( $cpu_number ) ; - $debug and myprint( "Number of processors found by cmd 'sysctl -n hw.ncpu': $cpu_number\n" ) ; + $sync->{ debug } and myprint( "Number of processors found by cmd 'sysctl -n hw.ncpu': $cpu_number\n" ) ; }elsif ( ! -e '/proc/cpuinfo' ) { - $debug and myprint( "Number of processors not found so I might assume there is only 1\n" ) ; + $sync->{ debug } and myprint( "Number of processors not found so I might assume there is only 1\n" ) ; $cpu_number = 1 ; }elsif( @cpuinfo = file_to_array( '/proc/cpuinfo' ) ) { $cpu_number = grep { /^processor/mxs } @cpuinfo ; - $debug and myprint( "Number of processors found via /proc/cpuinfo: $cpu_number\n" ) ; + $sync->{ debug } and myprint( "Number of processors found via /proc/cpuinfo: $cpu_number\n" ) ; } - + if ( defined $cpu_number_forced ) { $cpu_number = $cpu_number_forced ; } @@ -9454,28 +11821,34 @@ sub cpu_number { } -sub tests_integer_or_1 { +sub tests_integer_or_1 +{ + note( 'Entering tests_integer_or_1()' ) ; is( 1, integer_or_1( ), 'integer_or_1: no args => 1' ) ; is( 1, integer_or_1( undef ), 'integer_or_1: undef => 1' ) ; is( $NUMBER_10, integer_or_1( $NUMBER_10 ), 'integer_or_1: 10 => 10' ) ; is( 1, integer_or_1( q{} ), 'integer_or_1: empty string => 1' ) ; is( 1, integer_or_1( 'lalala' ), 'integer_or_1: lalala => 1' ) ; + + note( 'Leaving tests_integer_or_1()' ) ; return ; } -sub integer_or_1 { +sub integer_or_1 +{ my $number = shift ; - if ( is_an_integer( $number ) ) { - return $number ; + if ( is_an_integer( $number ) ) { + return $number ; } # else return 1 ; } -sub tests_is_an_integer { +sub tests_is_an_integer +{ note( 'Entering tests_is_an_integer()' ) ; - + is( undef, is_an_integer( ), 'is_an_integer: no args => undef ' ) ; ok( is_an_integer( 1 ), 'is_an_integer: 1 => yes ') ; ok( is_an_integer( $NUMBER_42 ), 'is_an_integer: 42 => yes ') ; @@ -9491,7 +11864,8 @@ sub tests_is_an_integer { return ; } -sub is_an_integer { +sub is_an_integer +{ my $number = shift ; if ( ! 
defined $number ) { return ; } return( $number =~ m{^\d+$}xo ) ; @@ -9500,7 +11874,8 @@ sub is_an_integer { -sub tests_loadavg { +sub tests_loadavg +{ note( 'Entering tests_loadavg()' ) ; @@ -9535,10 +11910,14 @@ sub tests_loadavg { } -sub loadavg { +sub loadavg +{ if ( 'linux' eq $OSNAME ) { return ( loadavg_linux( @ARG ) ) ; } + if ( 'freebsd' eq $OSNAME ) { + return ( loadavg_freebsd( @ARG ) ) ; + } if ( 'darwin' eq $OSNAME ) { return ( loadavg_darwin( @ARG ) ) ; } @@ -9549,7 +11928,8 @@ sub loadavg { } -sub loadavg_linux { +sub loadavg_linux +{ my $line = shift ; if ( ! $line ) { @@ -9558,14 +11938,37 @@ sub loadavg_linux { my ( $avg_1_min, $avg_5_min, $avg_15_min, $current_runs ) = split /\s/mxs, $line ; if ( all_defined( $avg_1_min, $avg_5_min, $avg_15_min ) ) { - $debug and myprint( "System load: $avg_1_min $avg_5_min $avg_15_min $current_runs\n" ) ; + $sync->{ debug } and myprint( "System load: $avg_1_min $avg_5_min $avg_15_min $current_runs\n" ) ; return ( $avg_1_min, $avg_5_min, $avg_15_min, $current_runs ) ; } return ; } +sub loadavg_freebsd +{ + my $file = shift ; + # Example of output of command "sysctl vm.loadavg": + # vm.loadavg: { 0.15 0.08 0.08 } + my $loadavg ; -sub loadavg_darwin { + if ( ! defined $file ) { + eval { + $loadavg = `/sbin/sysctl vm.loadavg` ; + #myprint( "LOADAVG FREEBSD: $loadavg\n" ) ; + } ; + if ( $EVAL_ERROR ) { myprint( "[$EVAL_ERROR]\n" ) ; return ; } + }else{ + $loadavg = firstline( $file ) or return ; + } + + my ( $avg_1_min, $avg_5_min, $avg_15_min ) + = $loadavg =~ /vm\.loadavg\s*[:=]\s*\{?\s*(\d+\.?\d*)\s+(\d+\.?\d*)\s+(\d+\.?\d*)/mxs ; + $sync->{ debug } and myprint( "System load: $avg_1_min $avg_5_min $avg_15_min\n" ) ; + return ( $avg_1_min, $avg_5_min, $avg_15_min ) ; +} + +sub loadavg_darwin +{ my $file = shift ; # Example of output of command "sysctl vm.loadavg": # vm.loadavg: { 0.15 0.08 0.08 } @@ -9583,11 +11986,12 @@ sub loadavg_darwin { my ( $avg_1_min, $avg_5_min, $avg_15_min ) = $loadavg =~ /vm\.loadavg\s*[:=]\s*\{?\s*(\d+\.?\d*)\s+(\d+\.?\d*)\s+(\d+\.?\d*)/mxs ; - $debug and myprint( "System load: $avg_1_min $avg_5_min $avg_15_min\n" ) ; + $sync->{ debug } and myprint( "System load: $avg_1_min $avg_5_min $avg_15_min\n" ) ; return ( $avg_1_min, $avg_5_min, $avg_15_min ) ; } -sub loadavg_windows { +sub loadavg_windows +{ my $file = shift ; # Example of output of command "wmic cpu get loadpercentage": # LoadPercentage @@ -9609,7 +12013,7 @@ sub loadavg_windows { my $num = $1 ; $num /= 100 ; - $debug and myprint( "System load: $num\n" ) ; + $sync->{ debug } and myprint( "System load: $num\n" ) ; return ( $num ) ; } @@ -9618,39 +12022,65 @@ sub loadavg_windows { -sub tests_load_and_delay { +sub tests_load_and_delay +{ note( 'Entering tests_load_and_delay()' ) ; is( undef, load_and_delay( ), 'load_and_delay: no args => undef ' ) ; is( undef, load_and_delay( 1 ), 'load_and_delay: not 4 args => undef ' ) ; is( undef, load_and_delay( 0, 1, 1, 1 ), 'load_and_delay: division per 0 => undef ' ) ; is( 0, load_and_delay( 1, 1, 1, 1 ), 'load_and_delay: one core, loads are all 1 => ok ' ) ; + is( 0, load_and_delay( 1, 1, 1, 1, 'lalala' ), 'load_and_delay: five arguments is ok' ) ; is( 0, load_and_delay( 2, 2, 2, 2 ), 'load_and_delay: two core, loads are all 2 => ok ' ) ; is( 0, load_and_delay( 2, 2, 4, 5 ), 'load_and_delay: two core, load1m is 2 => ok ' ) ; - is( 0, load_and_delay( 1, 0, 0, 0 ), 'load_and_delay: one core, load1m=0 load5m=0 load15m=0 => 0 ' ) ; - is( 0, load_and_delay( 1, 0, 0, 2 ), 'load_and_delay: one core, load1m=0 load5m=0 
load15m=2 => 0 ' ) ; - is( 0, load_and_delay( 1, 0, 2, 0 ), 'load_and_delay: one core, load1m=0 load5m=2 load15m=0 => 0 ' ) ; - is( 0, load_and_delay( 1, 0, 2, 2 ), 'load_and_delay: one core, load1m=0 load5m=2 load15m=2 => 0 ' ) ; - is( 1, load_and_delay( 1, 2, 0, 0 ), 'load_and_delay: one core, load1m=2 load5m=0 load15m=0 => 1 ' ) ; - is( 1, load_and_delay( 1, 2, 0, 2 ), 'load_and_delay: one core, load1m=2 load5m=0 load15m=2 => 1 ' ) ; - is( 5, load_and_delay( 1, 2, 2, 0 ), 'load_and_delay: one core, load1m=2 load5m=2 load15m=0 => 5 ' ) ; - is( 15, load_and_delay( 1, 2, 2, 2 ), 'load_and_delay: one core, load1m=2 load5m=2 load15m=2 => 15 ' ) ; +# Old behavior, rather strict + # is( 0, load_and_delay( 1, 0, 0, 0 ), 'load_and_delay: one core, load1m=0 load5m=0 load15m=0 => 0 ' ) ; + # is( 0, load_and_delay( 1, 0, 0, 2 ), 'load_and_delay: one core, load1m=0 load5m=0 load15m=2 => 0 ' ) ; + # is( 0, load_and_delay( 1, 0, 2, 0 ), 'load_and_delay: one core, load1m=0 load5m=2 load15m=0 => 0 ' ) ; + # is( 0, load_and_delay( 1, 0, 2, 2 ), 'load_and_delay: one core, load1m=0 load5m=2 load15m=2 => 0 ' ) ; + # is( 1, load_and_delay( 1, 2, 0, 0 ), 'load_and_delay: one core, load1m=2 load5m=0 load15m=0 => 1 ' ) ; + # is( 1, load_and_delay( 1, 2, 0, 2 ), 'load_and_delay: one core, load1m=2 load5m=0 load15m=2 => 1 ' ) ; + # is( 5, load_and_delay( 1, 2, 2, 0 ), 'load_and_delay: one core, load1m=2 load5m=2 load15m=0 => 5 ' ) ; + # is( 15, load_and_delay( 1, 2, 2, 2 ), 'load_and_delay: one core, load1m=2 load5m=2 load15m=2 => 15 ' ) ; - is( 0, load_and_delay( 4, 0, 2, 2 ), 'load_and_delay: four core, load1m=0 load5m=2 load15m=2 => 0 ' ) ; - is( 1, load_and_delay( 4, 8, 0, 0 ), 'load_and_delay: four core, load1m=2 load5m=0 load15m=0 => 1 ' ) ; - is( 1, load_and_delay( 4, 8, 0, 2 ), 'load_and_delay: four core, load1m=2 load5m=0 load15m=2 => 1 ' ) ; - is( 5, load_and_delay( 4, 8, 8, 0 ), 'load_and_delay: four core, load1m=2 load5m=2 load15m=0 => 5 ' ) ; - is( 15, load_and_delay( 4, 8, 8, 8 ), 'load_and_delay: four core, load1m=2 load5m=2 load15m=2 => 15 ' ) ; - is( 15, load_and_delay( 4, 8, 8, 8, 'lalala' ), 'load_and_delay: five arguments is ok' ) ; + # is( 0, load_and_delay( 4, 0, 2, 2 ), 'load_and_delay: four core, load1m=0 load5m=2 load15m=2 => 0 ' ) ; + # is( 1, load_and_delay( 4, 8, 0, 0 ), 'load_and_delay: four core, load1m=2 load5m=0 load15m=0 => 1 ' ) ; + # is( 1, load_and_delay( 4, 8, 0, 2 ), 'load_and_delay: four core, load1m=2 load5m=0 load15m=2 => 1 ' ) ; + # is( 5, load_and_delay( 4, 8, 8, 0 ), 'load_and_delay: four core, load1m=2 load5m=2 load15m=0 => 5 ' ) ; + # is( 15, load_and_delay( 4, 8, 8, 8 ), 'load_and_delay: four core, load1m=2 load5m=2 load15m=2 => 15 ' ) ; - note( 'Leaving tests_load_and_delay()' ) ; - return ; +# New behavior, tolerate more load + + is( 0, load_and_delay( 1, 0, 0, 0 ), 'load_and_delay: one core, load1m=0 load5m=0 load15m=0 => 0 ' ) ; + is( 0, load_and_delay( 1, 0, 0, 2 ), 'load_and_delay: one core, load1m=0 load5m=0 load15m=2 => 0 ' ) ; + is( 0, load_and_delay( 1, 0, 2, 0 ), 'load_and_delay: one core, load1m=0 load5m=2 load15m=0 => 0 ' ) ; + is( 0, load_and_delay( 1, 0, 2, 2 ), 'load_and_delay: one core, load1m=0 load5m=2 load15m=2 => 0 ' ) ; + is( 0, load_and_delay( 1, 2, 0, 0 ), 'load_and_delay: one core, load1m=2 load5m=0 load15m=0 => 1 ' ) ; + is( 0, load_and_delay( 1, 2, 0, 2 ), 'load_and_delay: one core, load1m=2 load5m=0 load15m=2 => 1 ' ) ; + is( 0, load_and_delay( 1, 2, 2, 0 ), 'load_and_delay: one core, load1m=2 load5m=2 load15m=0 => 5 ' ) ; + is( 0, 
load_and_delay( 1, 2, 2, 2 ), 'load_and_delay: one core, load1m=2 load5m=2 load15m=2 => 15 ' ) ; + + is( 1, load_and_delay( 1, 4, 0, 0 ), 'load_and_delay: one core, load1m=4 load5m=0 load15m=0 => 1 ' ) ; + is( 1, load_and_delay( 1, 4, 0, 4 ), 'load_and_delay: one core, load1m=4 load5m=0 load15m=4 => 1 ' ) ; + is( 5, load_and_delay( 1, 4, 4, 0 ), 'load_and_delay: one core, load1m=4 load5m=4 load15m=0 => 5 ' ) ; + is( 15, load_and_delay( 1, 4, 4, 4 ), 'load_and_delay: one core, load1m=4 load5m=4 load15m=4 => 15 ' ) ; + + is( 0, load_and_delay( 4, 0, 9, 9 ), 'load_and_delay: four core, load1m=0 load5m=9 load15m=9 => 0 ' ) ; + is( 1, load_and_delay( 4, 9, 0, 0 ), 'load_and_delay: four core, load1m=9 load5m=0 load15m=0 => 1 ' ) ; + is( 1, load_and_delay( 4, 9, 0, 9 ), 'load_and_delay: four core, load1m=9 load5m=0 load15m=9 => 1 ' ) ; + is( 5, load_and_delay( 4, 9, 9, 0 ), 'load_and_delay: four core, load1m=9 load5m=9 load15m=0 => 5 ' ) ; + is( 15, load_and_delay( 4, 9, 9, 9 ), 'load_and_delay: four core, load1m=9 load5m=9 load15m=9 => 15 ' ) ; + + note( 'Leaving tests_load_and_delay()' ) ; + return ; } -sub load_and_delay { +sub load_and_delay +{ # Basically return 0 if load is not heavy, ie <= 1 per processor + # Not enough arguments if ( 4 > scalar @ARG ) { return ; } my ( $cpu_num, $avg_1_min, $avg_5_min, $avg_15_min ) = @ARG ; @@ -9660,17 +12090,18 @@ sub load_and_delay { # Let divide by number of cores ( $avg_1_min, $avg_5_min, $avg_15_min ) = map { $_ / $cpu_num } ( $avg_1_min, $avg_5_min, $avg_15_min ) ; # One of avg ok => ok, for now it is a OR - if ( $avg_1_min <= 1 ) { return 0 ; } - if ( $avg_5_min <= 1 ) { return 1 ; } # Retry in 1 minute - if ( $avg_15_min <= 1 ) { return 5 ; } # Retry in 5 minutes + if ( $avg_1_min <= 2 ) { return 0 ; } + if ( $avg_5_min <= 2 ) { return 1 ; } # Retry in 1 minute + if ( $avg_15_min <= 2 ) { return 5 ; } # Retry in 5 minutes return 15 ; # Retry in 15 minutes } -sub ram_memory_info { +sub ram_memory_info +{ # In GigaBytes so division by 1024 * 1024 * 1024 - # + # return( - sprintf( "%.1f/%.1f free GiB of RAM", + sprintf( "%.1f/%.1f free GiB of RAM", Sys::MemInfo::get("freemem") / ( $KIBI ** 3 ), Sys::MemInfo::get("totalmem") / ( $KIBI ** 3 ), ) @@ -9679,20 +12110,22 @@ sub ram_memory_info { -sub tests_memory_stress { +sub tests_memory_stress +{ note( 'Entering tests_memory_stress()' ) ; - + is( undef, memory_stress( ), 'memory_stress: => undef' ) ; - + note( 'Leaving tests_memory_stress()' ) ; return ; } -sub memory_stress { +sub memory_stress +{ my $total_ram_in_MB = Sys::MemInfo::get("totalmem") / ( $KIBI * $KIBI ) ; my $i = 1 ; - + myprintf("Stress memory consumption before: %.1f MiB\n", memory_consumption( ) / $KIBI / $KIBI ) ; while ( $i < $total_ram_in_MB / 1.7 ) { $a .= "A" x 1000_000; $i++ } ; myprintf("Stress memory consumption after: %.1f MiB\n", memory_consumption( ) / $KIBI / $KIBI ) ; @@ -9700,7 +12133,8 @@ sub memory_stress { } -sub tests_memory_consumption { +sub tests_memory_consumption +{ note( 'Entering tests_memory_consumption()' ) ; like( memory_consumption( ), qr{\d+}xms,'memory_consumption no args') ; @@ -9717,33 +12151,36 @@ sub tests_memory_consumption { return ; } -sub memory_consumption { +sub memory_consumption +{ # memory consumed by imapsync until now in bytes return( ( memory_consumption_of_pids( ) )[0] ); } -sub debugmemory { +sub debugmemory +{ my $mysync = shift ; if ( ! 
$mysync->{debugmemory} ) { return q{} ; } - + my $precision = shift ; return( mysprintf( "Memory consumption$precision: %.1f MiB\n", memory_consumption( ) / $KIBI / $KIBI ) ) ; } -sub memory_consumption_of_pids { +sub memory_consumption_of_pids +{ my @pid = @_; @pid = ( @pid ) ? @pid : ( $PROCESS_ID ) ; - $debug and myprint( "memory_consumption_of_pids PIDs: @pid\n" ) ; + $sync->{ debug } and myprint( "memory_consumption_of_pids PIDs: @pid\n" ) ; my @val ; - if ( 'MSWin32' eq $OSNAME ) { + if ( ( 'MSWin32' eq $OSNAME ) or ( 'cygwin' eq $OSNAME ) ) { @val = memory_consumption_of_pids_win32( @pid ) ; }else{ # Unix my @ps = qx{ ps -o vsz -p @pid } ; #myprint( "ps: @ps" ) ; - + # Use IPC::Open3 from perlcrit -3 # It stalls on Darwin, don't understand why! #my @ps = backtick( "ps -o vsz -p @pid" ) ; @@ -9752,14 +12189,15 @@ sub memory_consumption_of_pids { shift @ps; # First line is column name "VSZ" chomp @ps; # convert to octets - + @val = map { $_ * $KIBI } @ps ; } - $debug and myprint "@val\n" ; + $sync->{ debug } and myprint( "@val\n" ) ; return( @val ) ; } -sub memory_consumption_of_pids_win32 { +sub memory_consumption_of_pids_win32 +{ # Windows my @PID = @_; my %PID; @@ -9791,7 +12229,8 @@ sub memory_consumption_of_pids_win32 { } -sub tests_backtick { +sub tests_backtick +{ note( 'Entering tests_backtick()' ) ; is( undef, backtick( ), 'backtick: no args' ) ; @@ -9803,11 +12242,11 @@ sub tests_backtick { @output = backtick( 'echo Hello World!' ) ; # Add \r on Windows. ok( "Hello World!\r\n" eq $output[0], 'backtick: echo Hello World!' ) ; - $debug and myprint( "[@output]" ) ; + $sync->{ debug } and myprint( "[@output]" ) ; @output = backtick( 'echo Hello & echo World!' ) ; ok( "Hello \r\n" eq $output[0], 'backtick: echo Hello & echo World! line 1' ) ; ok( "World!\r\n" eq $output[1], 'backtick: echo Hello & echo World! line 2' ) ; - $debug and myprint( "[@output][$output[0]][$output[1]]" ) ; + $sync->{ debug } and myprint( "[@output][$output[0]][$output[1]]" ) ; # Scalar context ok( "Hello World!\r\n" eq backtick( 'echo Hello World!' ), 'backtick: echo Hello World! scalar' ) ; @@ -9821,11 +12260,11 @@ sub tests_backtick { my @output ; @output = backtick( 'echo Hello World!' ) ; ok( "Hello World!\n" eq $output[0], 'backtick: echo Hello World!' ) ; - $debug and myprint( "[@output]" ) ; + $sync->{ debug } and myprint( "[@output]" ) ; @output = backtick( "echo Hello\necho World!" ) ; ok( "Hello\n" eq $output[0], 'backtick: echo Hello; echo World! line 1' ) ; ok( "World!\n" eq $output[1], 'backtick: echo Hello; echo World! line 2' ) ; - $debug and myprint( "[@output]" ) ; + $sync->{ debug } and myprint( "[@output]" ) ; # Scalar context ok( "Hello World!\n" eq backtick( 'echo Hello World!' ), 'backtick: echo Hello World! scalar' ) ; @@ -9834,8 +12273,8 @@ sub tests_backtick { # Return error positive value, that's ok is( undef, backtick( 'false' ), 'backtick: false returns no output' ) ; my $mem = backtick( "ps -o vsz -p $PROCESS_ID" ) ; - $debug and myprint "MEM=$mem\n" ; - + $sync->{ debug } and myprint( "MEM=$mem\n" ) ; + } note( 'Leaving tests_backtick()' ) ; @@ -9843,7 +12282,8 @@ sub tests_backtick { } -sub backtick { +sub backtick +{ my $command = shift ; if ( ! $command ) { return ; } @@ -9856,7 +12296,7 @@ sub backtick { } ; if ( $EVAL_ERROR ) { myprint( $EVAL_ERROR ) ; - return ; + return ; } if ( ! $eval ) { return ; } if ( ! 
$pid ) { return ; } @@ -9876,7 +12316,91 @@ sub backtick { } -sub remove_not_num { + +sub tests_check_binary_embed_all_dyn_libs +{ + note( 'Entering tests_check_binary_embed_all_dyn_libs()' ) ; + + is( 1, check_binary_embed_all_dyn_libs( ), 'check_binary_embed_all_dyn_libs: no args => 1' ) ; + + note( 'Leaving tests_check_binary_embed_all_dyn_libs()' ) ; + + return ; +} + + +sub check_binary_embed_all_dyn_libs +{ + my @search_dyn_lib_locale = search_dyn_lib_locale( ) ; + + if ( @search_dyn_lib_locale ) + { + myprint( "Found myself $PROGRAM_NAME pid $PROCESS_ID using locale dynamic libraries that seems out of myself:\n" ) ; + myprint( @search_dyn_lib_locale ) ; + if ( $PROGRAM_NAME =~ m{imapsync_bin_Darwin} ) + { + return 0 ; + } + elsif ( $PROGRAM_NAME =~ m{imapsync.*\.exe} ) + { + return 0 ; + } + else + { + # is always ok for non binary + return 1 ; + } + } + else + { + # Found only embedded dynamic lib + myprint( "Found nothing\n" ) ; + return 1 ; + } +} + +sub search_dyn_lib_locale +{ + if ( 'darwin' eq $OSNAME ) + { + return search_dyn_lib_locale_darwin( ) ; + } + if ( 'linux' eq $OSNAME ) + { + return search_dyn_lib_locale_linux( ) ; + } + if ( 'MSWin32' eq $OSNAME ) + { + return search_dyn_lib_locale_MSWin32( ) ; + } +} + +sub search_dyn_lib_locale_darwin +{ + my $command = qq{ lsof -p $PID | grep ' REG ' | grep .dylib | grep -v '/par-' } ; + myprint( "Search non embeded dynamic libs with the command: $command\n" ) ; + return backtick( $command ) ; +} + +sub search_dyn_lib_locale_linux +{ + my $command = qq{ lsof -p $PID | grep ' REG ' | grep -v '/tmp/par-' | grep '\.so' } ; + myprint( "Search non embeded dynamic libs with the command: $command\n" ) ; + return backtick( $command ) ; +} + +sub search_dyn_lib_locale_MSWin32 +{ + my $command = qq{ Listdlls.exe $PID|findstr Strawberry } ; + # $command = qq{ Listdlls.exe $PID|findstr Strawberry } ; + myprint( "Search non embeded dynamic libs with the command: $command\n" ) ; + return qx( $command ) ; +} + + + +sub remove_not_num +{ my $string = shift ; $string =~ tr/0-9//cd ; @@ -9884,7 +12408,8 @@ sub remove_not_num { return( $string ) ; } -sub tests_remove_not_num { +sub tests_remove_not_num +{ note( 'Entering tests_remove_not_num()' ) ; ok( '123' eq remove_not_num( 123 ), 'remove_not_num( 123 )' ) ; @@ -9896,7 +12421,8 @@ sub tests_remove_not_num { return ; } -sub remove_Ko { +sub remove_Ko +{ my $string = shift; if ($string =~ /^(.*)\sKo$/xo) { return($1); @@ -9905,7 +12431,8 @@ sub remove_Ko { } } -sub remove_qq { +sub remove_qq +{ my $string = shift; if ($string =~ /^"(.*)"$/xo) { return($1); @@ -9914,7 +12441,8 @@ sub remove_qq { } } -sub memory_consumption_ratio { +sub memory_consumption_ratio +{ my ($base) = @_; $base ||= 1; @@ -9923,24 +12451,26 @@ sub memory_consumption_ratio { } -sub date_from_rcs { +sub date_from_rcs +{ my $d = shift ; my %num2mon = qw( 01 Jan 02 Feb 03 Mar 04 Apr 05 May 06 Jun 07 Jul 08 Aug 09 Sep 10 Oct 11 Nov 12 Dec ) ; if ($d =~ m{(\d{4})/(\d{2})/(\d{2})\s(\d{2}):(\d{2}):(\d{2})}xo ) { # Handles the following format # 2015/07/10 11:05:59 -- Generated by RCS Date tag. 
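# Editor's note (comment only, not part of the patch): the block below rewrites
# an RCS-style timestamp into the IMAP internal-date layout, so, as exercised by
# tests_date_from_rcs() and the 'good_date from RCS date' case further down,
#   '2015/09/19 16:11:07'  becomes  '19-Sep-2015 16:11:07 +0000'
# with the numeric month mapped through %num2mon and a fixed +0000 zone appended.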
- #myprint( "$d\n" ) ; - #myprint( "header: [$1][$2][$3][$4][$5][$6]\n" ) ; + #myprint( "$d\n" ) ; + #myprint( "header: [$1][$2][$3][$4][$5][$6]\n" ) ; my ($year, $month, $day, $hour, $min, $sec) = ($1,$2,$3,$4,$5,$6) ; $month = $num2mon{$month} ; $d = "$day-$month-$year $hour:$min:$sec +0000" ; - #myprint( "$d\n" ) ; + #myprint( "$d\n" ) ; } return( $d ) ; } -sub tests_date_from_rcs { +sub tests_date_from_rcs +{ note( 'Entering tests_date_from_rcs()' ) ; ok('19-Sep-2015 16:11:07 +0000' @@ -9950,7 +12480,8 @@ sub tests_date_from_rcs { return ; } -sub good_date { +sub good_date +{ # two incoming formats: # header Tue, 24 Aug 2010 16:00:00 +0200 # internal 24-Aug-2010 16:00:00 +0200 @@ -9963,7 +12494,7 @@ sub good_date { SWITCH: { if ( $d =~ m{(\d?)(\d-...-\d{4})(\s\d{2}:\d{2}:\d{2})(\s(?:\+|-)\d{4})?}xo ) { - #myprint( "internal: [$1][$2][$3][$4]\n" ) ; + #myprint( "internal: [$1][$2][$3][$4]\n" ) ; my ($day_1, $date_rest, $hour, $zone) = ($1,$2,$3,$4) ; $day_1 = '0' if ($day_1 eq q{}) ; $zone = ' +0000' if not defined $zone ; @@ -10017,12 +12548,12 @@ sub good_date { if ($d =~ m{(\d{4})/(\d{2})/(\d{2})\s(\d{2}):(\d{2}):(\d{2})}xo ) { # Handles the following format # 2015/07/10 11:05:59 -- Generated by RCS Date tag. - #myprint( "$d\n" ) ; - #myprint( "header: [$1][$2][$3][$4][$5][$6]\n" ) ; + #myprint( "$d\n" ) ; + #myprint( "header: [$1][$2][$3][$4][$5][$6]\n" ) ; my ($year, $month, $day, $hour, $min, $sec) = ($1,$2,$3,$4,$5,$6) ; $month = $num2mon{$month} ; $d = "$day-$month-$year $hour:$min:$sec +0000" ; - #myprint( "$d\n" ) ; + #myprint( "$d\n" ) ; last SWITCH ; } @@ -10080,7 +12611,8 @@ sub good_date { } -sub tests_good_date { +sub tests_good_date +{ note( 'Entering tests_good_date()' ) ; ok(q{} eq good_date(), 'good_date no arg'); @@ -10102,15 +12634,15 @@ sub tests_good_date { ok('"24-Aug-1997 16:00:00 +0200"' eq good_date('Tue, 24 Aug 97 16:00:00 +0200'), 'good_date header 2digit year'); ok('"24-Aug-2004 16:00:00 +0200"' eq good_date('Tue, 24 Aug 04 16:00:00 +0200'), 'good_date header 2digit year'); ok('"24-Aug-1997 16:00:00 +0200"' eq good_date('Tue, 24 Aug 1997 16.00.00 +0200'), 'good_date header period time sep'); - ok('"24-Aug-1997 16:00:00 +0200"' eq good_date('Tue, 24 Aug 1997 16:00:00 +0200'), 'good_date header extra white space type1'); + ok('"24-Aug-1997 16:00:00 +0200"' eq good_date('Tue, 24 Aug 1997 16:00:00 +0200'), 'good_date header extra white space type1'); ok('"24-Aug-1997 05:06:02 +0200"' eq good_date('Tue, 24 Aug 1997 5:6:2 +0200'), 'good_date header 1digit time vals'); ok('"24-Aug-1997 05:06:02 +0200"' eq good_date('Tue, 24, Aug 1997 05:06:02 +0200'), 'good_date header extra commas'); ok('"01-Oct-2003 12:45:24 +0000"' eq good_date('Wednesday, 01 October 2003 12:45:24 CDT'), 'good_date header no abbrev'); - ok('"11-Jan-2005 17:58:27 -0500"' eq good_date('Tue, 11 Jan 2005 17:58:27 -0500'), 'good_date extra white space'); + ok('"11-Jan-2005 17:58:27 -0500"' eq good_date('Tue, 11 Jan 2005 17:58:27 -0500'), 'good_date extra white space'); ok('"18-Dec-2002 15:07:00 +0000"' eq good_date('Wednesday, December 18, 2002 03:07 PM'), 'good_date kbtoys.com orders'); ok('"16-Dec-2004 02:01:49 -0500"' eq good_date('Dec 16 2004 02:01:49 -0500'), 'good_date jr.com orders'); ok('"21-Jun-2001 11:11:11 +0000"' eq good_date('21-Jun-2001'), 'good_date register.com domain transfer'); - ok('"18-Nov-2012 18:34:38 +0100"' eq good_date('Sun, 18 Nov 2012 18:34:38 +0100'), 'good_date pop2imap bug (Westeuropäische Normalzeit)'); + ok('"18-Nov-2012 18:34:38 +0100"' eq good_date('Sun, 18 Nov 2012 
18:34:38 +0100'), 'good_date pop2imap bug (Westeuropäische Normalzeit)'); ok('"19-Sep-2015 16:11:07 +0000"' eq good_date('Date: 2015/09/19 16:11:07 '), 'good_date from RCS date' ) ; note( 'Leaving tests_good_date()' ) ; @@ -10118,7 +12650,8 @@ sub tests_good_date { } -sub tests_list_keys_in_2_not_in_1 { +sub tests_list_keys_in_2_not_in_1 +{ note( 'Entering tests_list_keys_in_2_not_in_1()' ) ; @@ -10135,62 +12668,197 @@ sub tests_list_keys_in_2_not_in_1 { return ; } -sub list_keys_in_2_not_in_1 { - - my $folders1_ref = shift; - my $folders2_ref = shift; +sub list_keys_in_2_not_in_1 +{ + my $hash_1_ref = shift; + my $hash_2_ref = shift; my @list; - foreach my $folder ( sort keys %{ $folders2_ref } ) { - next if exists $folders1_ref->{$folder}; - push @list, $folder; + foreach my $key ( sort keys %{ $hash_2_ref } ) { + #$debug and print "$folder\n" ; + next if exists $hash_1_ref->{$key} ; + push @list, $key ; } - return(@list); + #$debug and print "@list\n" ; + return( @list ) ; } -sub list_folders_in_2_not_in_1 { +sub list_folders_in_2_not_in_1 +{ - my (@h2_folders_not_in_h1, %h2_folders_not_in_h1) ; - @h2_folders_not_in_h1 = list_keys_in_2_not_in_1( \%h1_folders_all, \%h2_folders_all) ; + my ( @h2_folders_not_in_h1, %h2_folders_not_in_h1 ) ; + @h2_folders_not_in_h1 = list_keys_in_2_not_in_1( \%h1_folders_all, \%h2_folders_all ) ; map { $h2_folders_not_in_h1{$_} = 1} @h2_folders_not_in_h1 ; - @h2_folders_not_in_h1 = list_keys_in_2_not_in_1( \%h2_folders_from_1_all, \%h2_folders_not_in_h1) ; + @h2_folders_not_in_h1 = list_keys_in_2_not_in_1( \%h2_folders_from_1_all, \%h2_folders_not_in_h1 ) ; - return( reverse @h2_folders_not_in_h1 ); + return( reverse @h2_folders_not_in_h1 ) ; +} + +sub tests_nb_messages_in_2_not_in_1 +{ + note( 'Entering tests_stats_across_folders()' ) ; + is( undef, nb_messages_in_2_not_in_1( ), 'nb_messages_in_2_not_in_1: no args => undef' ) ; + + my $mysync->{ h1_folders_of_md5 }->{ 'some_id_01' }->{ 'some_folder_01' } = 1 ; + is( 0, nb_messages_in_2_not_in_1( $mysync ), 'nb_messages_in_2_not_in_1: no messages in 2 => 0' ) ; + + $mysync->{ h1_folders_of_md5 }->{ 'some_id_in_1_and_2' }->{ 'some_folder_01' } = 2 ; + $mysync->{ h2_folders_of_md5 }->{ 'some_id_in_1_and_2' }->{ 'some_folder_02' } = 4 ; + + is( 0, nb_messages_in_2_not_in_1( $mysync ), 'nb_messages_in_2_not_in_1: a common message => 0' ) ; + + $mysync->{ h2_folders_of_md5 }->{ 'some_id_in_2_not_in_1' }->{ 'some_folder_02' } = 1 ; + is( 1, nb_messages_in_2_not_in_1( $mysync ), 'nb_messages_in_2_not_in_1: one message in_2_not_in_1 => 1' ) ; + + $mysync->{ h2_folders_of_md5 }->{ 'some_other_id_in_2_not_in_1' }->{ 'some_folder_02' } = 3 ; + is( 2, nb_messages_in_2_not_in_1( $mysync ), 'nb_messages_in_2_not_in_1: two messages in_2_not_in_1 => 2' ) ; + + note( 'Leaving tests_stats_across_folders()' ) ; + return ; +} + +sub nb_messages_in_2_not_in_1 +{ + my $mysync = shift ; + if ( not defined $mysync ) { return ; } + + $mysync->{ nb_messages_in_2_not_in_1 } = scalar( + list_keys_in_2_not_in_1( + $mysync->{ h1_folders_of_md5 }, + $mysync->{ h2_folders_of_md5 } ) ) ; + + return $mysync->{ nb_messages_in_2_not_in_1 } ; } -sub tests_match { - note( 'Entering tests_match()' ) ; +sub nb_messages_in_1_not_in_2 +{ + my $mysync = shift ; + if ( not defined $mysync ) { return ; } - # undef serie - is( undef, match( ), 'match: no args => undef' ) ; - is( undef, match( 'lalala' ), 'match: one args => undef' ) ; + $mysync->{ nb_messages_in_1_not_in_2 } = scalar( + list_keys_in_2_not_in_1( + $mysync->{ h2_folders_of_md5 }, + 
$mysync->{ h1_folders_of_md5 } ) ) ; - # This one gives 0 under a binary made by pp - # but 1 under "normal" Perl interpreter. So a PAR bug? - #is( 1, match( q{}, q{} ), 'match: q{} =~ q{} => 1' ) ; - - is( 1, match( 'lalala', 'lalala' ), 'match: lalala =~ lalala => 1' ) ; - is( 1, match( 'lalala', '^lalala' ), 'match: lalala =~ ^lalala => 1' ) ; - is( 1, match( 'lalala', 'lalala$' ), 'match: lalala =~ lalala$ => 1' ) ; - is( 1, match( 'lalala', '^lalala$' ), 'match: lalala =~ ^lalala$ => 1' ) ; - is( 1, match( '_lalala_', 'lalala' ), 'match: _lalala_ =~ lalala => 1' ) ; - is( 1, match( 'lalala', '.*' ), 'match: lalala =~ .* => 1' ) ; - is( 1, match( 'lalala', '.' ), 'match: lalala =~ . => 1' ) ; - is( 1, match( '/lalala/', '/lalala/' ), 'match: /lalala/ =~ /lalala/ => 1' ) ; + return $mysync->{ nb_messages_in_1_not_in_2 } ; +} - is( 0, match( 'lalala', 'ooo' ), 'match: lalala =~ ooo => 0' ) ; - is( 0, match( 'lalala', 'lal_ala' ), 'match: lalala =~ lal_ala => 0' ) ; - is( 0, match( 'lalala', '\.' ), 'match: lalala =~ \. => 0' ) ; - is( 0, match( 'lalalaX', '^lalala$' ), 'match: lalalaX =~ ^lalala$ => 0' ) ; - is( 0, match( 'lalala', '/lalala/' ), 'match: lalala =~ /lalala/ => 1' ) ; - is( 1, match( 'LALALA', '(?i:lalala)' ), 'match: LALALA =~ (?i:lalala) => 1' ) ; +sub comment_on_final_diff_in_1_not_in_2 +{ + my $mysync = shift ; + + if ( not defined $mysync + or $mysync->{ justfolders } + or $mysync->{ useuid } + ) + { + return ; + } + + my $nb_identified_h1_messages = scalar( keys %{ $mysync->{ h1_folders_of_md5 } } ) ; + my $nb_identified_h2_messages = scalar( keys %{ $mysync->{ h2_folders_of_md5 } } ) ; + $mysync->{ debug } and myprint( "nb_keys h1_folders_of_md5 $nb_identified_h1_messages\n" ) ; + $mysync->{ debug } and myprint( "nb_keys h2_folders_of_md5 $nb_identified_h2_messages\n" ) ; + + if ( 0 == $nb_identified_h1_messages ) { return ; } + + # Calculate if not yet done + if ( not defined $mysync->{ nb_messages_in_1_not_in_2 } ) + { + nb_messages_in_1_not_in_2( $mysync ) ; + } + + + if ( 0 == $mysync->{ nb_messages_in_1_not_in_2 } ) + { + myprint( "The sync looks good, all $nb_identified_h1_messages identified messages in host1 are on host2.\n" ) ; + } + else + { + myprint( "The sync is not finished, there are $mysync->{ nb_messages_in_1_not_in_2 } identified messages in host1 that are not on host2.\n" ) ; + } + + if ( 1 <= $mysync->{ h1_nb_msg_noheader } ) + { + myprint( "There are $mysync->{ h1_nb_msg_noheader } unidentified messages (usually Sent or Draft messages). 
To sync them add option --addheader\n" ) ; + } + + return ; +} + +sub comment_on_final_diff_in_2_not_in_1 +{ + my $mysync = shift ; + + if ( not defined $mysync + or $mysync->{ justfolders } + or $mysync->{ useuid } + ) + { + return ; + } + + my $nb_identified_h2_messages = scalar( keys %{ $mysync->{ h2_folders_of_md5 } } ) ; + # Calculate if not yet done + if ( not defined $mysync->{ nb_messages_in_2_not_in_1 } ) + { + nb_messages_in_2_not_in_1( $mysync ) ; + } + + if ( 0 == $mysync->{ nb_messages_in_2_not_in_1 } ) + { + myprint( "The sync is strict, all $nb_identified_h2_messages identified messages in host2 are on host1.\n" ) ; + } + else + { + myprint( "The sync is not strict, there are ", + $mysync->{ nb_messages_in_2_not_in_1 }, + " messages in host2 that are not on host1.", + " Use --delete2 to delete them and have a strict sync.\n" ) ; + } + return ; +} + + +sub tests_match +{ + note( 'Entering tests_match()' ) ; + + # undef serie + is( undef, match( ), 'match: no args => undef' ) ; + is( undef, match( 'lalala' ), 'match: one args => undef' ) ; + + # This one gives 0 under a binary made by pp + # but 1 under "normal" Perl interpreter. So a PAR bug? + #is( 1, match( q{}, q{} ), 'match: q{} =~ q{} => 1' ) ; + + is( 'lalala', match( 'lalala', 'lalala' ), 'match: lalala =~ lalala => lalala' ) ; + is( 'lalala', match( 'lalala', '^lalala' ), 'match: lalala =~ ^lalala => lalala' ) ; + is( 'lalala', match( 'lalala', 'lalala$' ), 'match: lalala =~ lalala$ => lalala' ) ; + is( 'lalala', match( 'lalala', '^lalala$' ), 'match: lalala =~ ^lalala$ => lalala' ) ; + is( '_lalala_', match( '_lalala_', 'lalala' ), 'match: _lalala_ =~ lalala => _lalala_' ) ; + is( 'lalala', match( 'lalala', '.*' ), 'match: lalala =~ .* => lalala' ) ; + is( 'lalala', match( 'lalala', '.' ), 'match: lalala =~ . => lalala' ) ; + is( '/lalala/', match( '/lalala/', '/lalala/' ), 'match: /lalala/ =~ /lalala/ => /lalala/' ) ; + + is( 0, match( 'foo', 's/foo/bar/g' ), 'match: foo =~ s/foo/bar/g => 0' ) ; + is( 's/foo/bar/g', match( 's/foo/bar/g', 's/foo/bar/g' ), 'match: s/foo/bar/g =~ s/foo/bar/g => s/foo/bar/g' ) ; + + + is( 0, match( 'lalala', 'ooo' ), 'match: lalala =~ ooo => 0' ) ; + is( 0, match( 'lalala', 'lal_ala' ), 'match: lalala =~ lal_ala => 0' ) ; + is( 0, match( 'lalala', '\.' ), 'match: lalala =~ \. => 0' ) ; + is( 0, match( 'lalalaX', '^lalala$' ), 'match: lalalaX =~ ^lalala$ => 0' ) ; + is( 0, match( 'lalala', '/lalala/' ), 'match: lalala =~ /lalala/ => 0' ) ; + + is( 'LALALA', match( 'LALALA', '(?i:lalala)' ), 'match: LALALA =~ (?i:lalala) => 1' ) ; is( undef, match( 'LALALA', '(?{`ls /`})' ), 'match: LALALA =~ (?{`ls /`}) => undef' ) ; - is( undef, match( 'LALALA', '(?{print "CACA"})' ), 'match: LALALA =~ (?{print "CACA"}) => undef' ) ; + is( undef, match( 'LALALA', '(?{print "CACA"})' ), 'match: LALALA =~ (?{print "CACA"}) => undef' ) ; is( undef, match( 'CACA', '(??{print "CACA"})' ), 'match: CACA =~ (??{print "CACA"}) => undef' ) ; note( 'Leaving tests_match()' ) ; @@ -10198,15 +12866,16 @@ sub tests_match { return ; } -sub match { +sub match +{ my( $var, $regex ) = @ARG ; # undef cases if ( ( ! defined $var ) or ( ! 
defined $regex ) ) { return ; } # normal cases - if ( eval { $var =~ $regex } ) { - return 1 ; + if ( eval { $var =~ qr{$regex} } ) { + return $var ; }elsif ( $EVAL_ERROR ) { myprint( "Fatal regex $regex\n" ) ; return ; @@ -10217,7 +12886,8 @@ sub match { } -sub tests_notmatch { +sub tests_notmatch +{ note( 'Entering tests_notmatch()' ) ; # undef serie @@ -10232,7 +12902,7 @@ sub tests_notmatch { # but 0 under "normal" Perl interpreter. So a PAR bug, same in tests_match . #is( 0, notmatch( q{}, q{} ), 'notmatch: q{} !~ q{} => 0' ) ; - is( 0, notmatch( 'lalala', 'lalala' ), 'notmatch: lalala !~ lalala => 0' ) ; + is( 0, notmatch( 'lalala', 'lalala' ), 'notmatch: lalala !~ lalala => 0' ) ; is( 0, notmatch( 'lalala', '^lalala' ), 'notmatch: lalala !~ ^lalala => 0' ) ; is( 0, notmatch( 'lalala', 'lalala$' ), 'notmatch: lalala !~ lalala$ => 0' ) ; is( 0, notmatch( 'lalala', '^lalala$' ), 'notmatch: lalala !~ ^lalala$ => 0' ) ; @@ -10251,7 +12921,8 @@ sub tests_notmatch { return ; } -sub notmatch { +sub notmatch +{ my( $var, $regex ) = @ARG ; # undef cases @@ -10270,30 +12941,32 @@ sub notmatch { } -sub delete_folders_in_2_not_in_1 { +sub delete_folders_in_2_not_in_1 +{ foreach my $folder (@h2_folders_not_in_1) { if ( defined $delete2foldersonly and eval "\$folder !~ $delete2foldersonly" ) { - myprint( "Not deleting $folder because of --delete2foldersonly $delete2foldersonly\n" ) ; + myprint( "Not deleting $folder because of --delete2foldersonly $delete2foldersonly\n" ) ; next ; } if ( defined $delete2foldersbutnot and eval "\$folder =~ $delete2foldersbutnot" ) { - myprint( "Not deleting $folder because of --delete2foldersbutnot $delete2foldersbutnot\n" ) ; + myprint( "Not deleting $folder because of --delete2foldersbutnot $delete2foldersbutnot\n" ) ; next ; } my $res = $sync->{dry} ; # always success in dry mode! - $imap2->unsubscribe( $folder ) if ( ! $sync->{dry} ) ; - $res = $imap2->delete( $folder ) if ( ! $sync->{dry} ) ; + $sync->{imap2}->unsubscribe( $folder ) if ( ! $sync->{dry} ) ; + $res = $sync->{imap2}->delete( $folder ) if ( ! $sync->{dry} ) ; if ( $res ) { - myprint( "Deleted $folder", "$sync->{dry_message}", "\n" ) ; + myprint( "Deleted $folder", "$sync->{dry_message}", "\n" ) ; }else{ - myprint( "Deleting $folder failed", "\n" ) ; + myprint( "Deleting $folder failed", "\n" ) ; } } return ; } -sub delete_folder { +sub delete_folder +{ my ( $mysync, $imap, $folder, $Side ) = @_ ; if ( ! $mysync ) { return ; } if ( ! $imap ) { return ; } @@ -10306,15 +12979,16 @@ sub delete_folder { $res = $imap->delete( $folder ) ; } if ( $res ) { - myprint( "$Side deleted $folder", $mysync->{dry_message}, "\n" ) ; + myprint( "$Side deleted $folder", $mysync->{dry_message}, "\n" ) ; return 1 ; }else{ - myprint( "$Side deleting $folder failed", "\n" ) ; + myprint( "$Side deleting $folder failed", "\n" ) ; return ; } } -sub delete1emptyfolders { +sub delete1emptyfolders +{ my $mysync = shift ; if ( ! $mysync ) { return ; } # abort if no parameter if ( ! 
$mysync->{delete1emptyfolders} ) { return ; } # abort if --delete1emptyfolders off @@ -10327,30 +13001,30 @@ sub delete1emptyfolders { foreach my $folder ( reverse sort @{ $mysync->{h1_folders_wanted} } ) { my $parenthood = $imap->is_parent( $folder ) ; if ( defined $parenthood and $parenthood ) { - myprint( "Host1 folder $folder has subfolders\n" ) ; + myprint( "Host1: folder $folder has subfolders\n" ) ; $folders_kept{ $folder }++ ; next ; } - my $nb_messages_select = examine_folder_and_count( $imap, $folder, 'Host1' ) ; + my $nb_messages_select = examine_folder_and_count( $mysync, $imap, $folder, 'Host1' ) ; if ( ! defined $nb_messages_select ) { next ; } # Select failed => Neither continue nor keep this folder } my $nb_messages_search = scalar( @{ $imap->messages( ) } ) ; if ( 0 != $nb_messages_select and 0 != $nb_messages_search ) { - myprint( "Host1 folder $folder has messages: $nb_messages_search (search) $nb_messages_select (select)\n" ) ; + myprint( "Host1: folder $folder has messages: $nb_messages_search (search) $nb_messages_select (select)\n" ) ; $folders_kept{ $folder }++ ; next ; } if ( 0 != $nb_messages_select + $nb_messages_search ) { - myprint( "Host1 folder $folder odd messages count: $nb_messages_search (search) $nb_messages_select (select)\n" ) ; + myprint( "Host1: folder $folder odd messages count: $nb_messages_search (search) $nb_messages_select (select)\n" ) ; $folders_kept{ $folder }++ ; next ; } # Here we must have 0 messages by messages() aka "SEARCH ALL" and also "EXAMINE" if ( uc $folder eq 'INBOX' ) { - myprint( "Host1 Not deleting $folder\n" ) ; + myprint( "Host1: Not deleting $folder\n" ) ; $folders_kept{ $folder }++ ; next ; } - myprint( "Host1 deleting empty folder $folder\n" ) ; + myprint( "Host1: deleting empty folder $folder\n" ) ; # can not delete a SELECTed or EXAMINEd folder so closing it # could changed be SELECT INBOX $imap->close( ) ; # close after examine does not expunge; anyway expunging an empty folder... @@ -10366,7 +13040,8 @@ sub delete1emptyfolders { return ; } -sub remove_deleted_folders_from_wanted_list { +sub remove_deleted_folders_from_wanted_list +{ my ( $mysync, %folders_kept ) = @ARG ; my @h1_folders_wanted_init = @{ $mysync->{h1_folders_wanted} } ; @@ -10380,11 +13055,13 @@ sub remove_deleted_folders_from_wanted_list { return ; } -sub examine_folder_and_count { - my ( $imap, $folder, $Side ) = @_ ; + +sub examine_folder_and_count +{ + my ( $mysync, $imap, $folder, $Side ) = @_ ; $Side ||= 'HostX' ; - if ( ! examine_folder( $imap, $folder, $Side ) ) { + if ( ! 
examine_folder( $mysync, $imap, $folder, $Side ) ) { return ; } my $nb_messages_select = count_from_select( $imap->History ) ; @@ -10392,7 +13069,8 @@ sub examine_folder_and_count { } -sub tests_delete1emptyfolders { +sub tests_delete1emptyfolders +{ note( 'Entering tests_delete1emptyfolders()' ) ; @@ -10403,7 +13081,7 @@ sub tests_delete1emptyfolders { $syncT->{imap1} = $imapT ; is( undef, delete1emptyfolders( $syncT ), q{delete1emptyfolders: undef imap} ) ; - require Test::MockObject ; + require_ok( "Test::MockObject" ) ; $imapT = Test::MockObject->new( ) ; $syncT->{imap1} = $imapT ; @@ -10498,7 +13176,8 @@ sub tests_delete1emptyfolders { return ; } -sub tests_delete1emptyfolders_unit { +sub tests_delete1emptyfolders_unit +{ note( 'Entering tests_delete1emptyfolders_unit()' ) ; my $syncT = shift ; @@ -10519,20 +13198,20 @@ sub tests_delete1emptyfolders_unit { return ; } -sub extract_header { +sub extract_header +{ my $string = shift ; my ( $header ) = split /\n\n/x, $string ; if ( ! $header ) { return( q{} ) ; } - #myprint( "[$header]\n" ) ; + #myprint( "[$header]\n" ) ; return( $header ) ; } -sub tests_extract_header { +sub tests_extract_header +{ note( 'Entering tests_extract_header()' ) ; - - my $h = <<'EOM'; Message-Id: <20100428101817.A66CB162474E@plume.est.belle> Date: Wed, 28 Apr 2010 12:18:17 +0200 (CEST) @@ -10566,19 +13245,19 @@ sub decompose_header{ my ($key, $val ) ; my @line = split /\n|\r\n/x, $string ; foreach my $line ( @line ) { - #myprint( "DDD $line\n" ) ; + #myprint( "DDD $line\n" ) ; # End of header last if ( $line =~ m{^$}xo ) ; # Key: value if ( $line =~ m/(^[^:]+):\s(.*)/xo ) { $key = $1 ; $val = $2 ; - $debugdev and myprint( "DDD KV [$key] [$val]\n" ) ; + $debugdev and myprint( "DDD KV [$key] [$val]\n" ) ; push @{ $header->{ $key } }, $val ; # blanc and value => value from previous line continues }elsif( $line =~ m/^(\s+)(.*)/xo ) { $val = $2 ; - $debugdev and myprint( "DDD V [$val]\n" ) ; + $debugdev and myprint( "DDD V [$val]\n" ) ; @{ $header->{ $key } }[ $LAST ] .= " $val" if $key ; # dirty line? 
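# A minimal illustrative sketch with assumed sample data (not taken from this patch):
# for a folded header block like
#   Subject: hello
#    world
#   Received: by hostA
#   Received: by hostB
# the parsing loop above should end up with roughly
#   $header->{ 'Subject' }  = [ 'hello world' ]
#   $header->{ 'Received' } = [ 'by hostA', 'by hostB' ]
# since a "Key: value" line pushes a new value for that key, and a line starting
# with whitespace is appended to the last value stored for the current $key.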
}else{ @@ -10700,7 +13379,8 @@ EOH return ; } -sub tests_epoch { +sub tests_epoch +{ note( 'Entering tests_epoch()' ) ; ok( '1282658400' eq epoch( '24-Aug-2010 16:00:00 +0200' ), 'epoch 24-Aug-2010 16:00:00 +0200 -> 1282658400' ) ; @@ -10714,7 +13394,7 @@ sub tests_epoch { ok( '1280671200' eq epoch( '1-Aug-2010 12:00:00 -0200' ), 'epoch 1-Aug-2010 12:00:00 -0200 -> 1280671200' ) ; ok( '1280671200' eq epoch( '1-Aug-2010 16:01:00 +0201' ), 'epoch 1-Aug-2010 16:01:00 +0201 -> 1280671200' ) ; ok( '1280671200' eq epoch( '1-Aug-2010 14:01:00 +0001' ), 'epoch 1-Aug-2010 14:01:00 +0001 -> 1280671200' ) ; - + is( '1280671200', epoch( '1-Aug-2010 14:01:00 +0001' ), 'epoch 1-Aug-2010 14:01:00 +0001 -> 1280671200' ) ; is( '946684800', epoch( '00-Jan-0000 00:00:00 +0000' ), 'epoch 1-Aug-2010 14:01:00 +0001 -> 1280671200' ) ; @@ -10722,7 +13402,8 @@ sub tests_epoch { return ; } -sub epoch { +sub epoch +{ # incoming format: # internal date 24-Aug-2010 16:00:00 +0200 @@ -10736,10 +13417,10 @@ sub epoch { my $time ; if ( $d =~ m{(\d{1,2})-([A-Z][a-z]{2})-(\d{4})\s(\d{2}):(\d{2}):(\d{2})\s((?:\+|-))(\d{2})(\d{2})}xo ) { - #myprint( "internal: [$1][$2][$3][$4][$5][$6][$7][$8][$9]\n" ) ; + #myprint( "internal: [$1][$2][$3][$4][$5][$6][$7][$8][$9]\n" ) ; ( $mday, $month, $year, $hour, $min, $sec, $sign, $zone_h, $zone_m ) = ( $1, $2, $3, $4, $5, $6, $7, $8, $9 ) ; - #myprint( "( $mday, $month, $year, $hour, $min, $sec, $sign, $zone_h, $zone_m )\n" ) ; + #myprint( "( $mday, $month, $year, $hour, $min, $sec, $sign, $zone_h, $zone_m )\n" ) ; $sign = +1 if ( '+' eq $sign ) ; $sign = $MINUS_ONE if ( '-' eq $sign ) ; @@ -10756,7 +13437,8 @@ sub epoch { return( $time ) ; } -sub tests_add_header { +sub tests_add_header +{ note( 'Entering tests_add_header()' ) ; ok( 'Message-Id: ' eq add_header(), 'add_header no arg' ) ; @@ -10766,7 +13448,8 @@ sub tests_add_header { return ; } -sub add_header { +sub add_header +{ my $header_uid = shift || 'mistake' ; my $header_Message_Id = 'Message-Id: <' . $header_uid . '@imapsync>' ; return( $header_Message_Id ) ; @@ -10775,7 +13458,8 @@ sub add_header { -sub tests_max_line_length { +sub tests_max_line_length +{ note( 'Entering tests_max_line_length()' ) ; ok( 0 == max_line_length( q{} ), 'max_line_length: 0 == null string' ) ; @@ -10797,7 +13481,8 @@ sub tests_max_line_length { return ; } -sub max_line_length { +sub max_line_length +{ my $string = shift ; my $max = 0 ; @@ -10808,7 +13493,8 @@ sub max_line_length { } -sub tests_setlogfile { +sub tests_setlogfile +{ note( 'Entering tests_setlogfile()' ) ; my $mysync = {} ; @@ -10860,7 +13546,7 @@ sub tests_setlogfile { is( "$DEFAULT_LOGDIR/1970_01_01_00_00_02_000_user1_user2_remote_abort.txt", setlogfile( $mysync ), "setlogfile: default is like $DEFAULT_LOGDIR/1970_01_01_00_00_02_000_user1_user2_remote_abort.txt" ) ; - + $mysync = { timestart => 2, user1 => 'user1', @@ -10894,37 +13580,39 @@ sub tests_setlogfile { "setlogfile: logdir undef, $DEFAULT_LOGDIR/1970_01_01_00_00_00_000_us_er1a_______b_u_ser2a_______b.txt" ) ; - + } ; note( 'Leaving tests_setlogfile()' ) ; return ; } -sub setlogfile { +sub setlogfile +{ my( $mysync ) = shift ; - + # When aborting another process the log file name finishes with "_abort.txt" my $abort_suffix = ( $mysync->{abort} ) ? '_abort' : q{} ; # When acting as a proxy the log file name finishes with "_remote.txt" # proxy mode is not done yet my $remote_suffix = ( $mysync->{remote} ) ? '_remote' : q{} ; - + my $suffix = ( filter_forbidden_characters( move_slash( $mysync->{user1} ) ) || q{} ) - . '_' - . 
( filter_forbidden_characters( move_slash( $mysync->{user2} ) ) || q{} ) + . '_' + . ( filter_forbidden_characters( move_slash( $mysync->{user2} ) ) || q{} ) . $remote_suffix . $abort_suffix ; $mysync->{logdir} = defined $mysync->{logdir} ? $mysync->{logdir} : $DEFAULT_LOGDIR ; - - $mysync->{logfile} = defined $mysync->{logfile} - ? "$mysync->{logdir}/$mysync->{logfile}" + + $mysync->{logfile} = defined $mysync->{logfile} + ? "$mysync->{logdir}/$mysync->{logfile}" : logfile( $mysync->{timestart}, $suffix, $mysync->{logdir} ) ; return( $mysync->{logfile} ) ; } -sub tests_logfile { +sub tests_logfile +{ note( 'Entering tests_logfile()' ) ; SKIP: { @@ -10942,10 +13630,10 @@ sub tests_logfile { is( '2010_08_24_14_01_01_000_poupinette.txt', logfile( 1_282_658_461, 'poupinette' ), 'logfile: 1_282_658_461 poupinette => 2010_08_24_14_01_01_poupinette.txt' ) ; is( '2010_08_24_14_01_01_000_removeblanks.txt', logfile( 1_282_658_461, ' remove blanks ' ), 'logfile: 1_282_658_461 remove blanks => 2010_08_24_14_01_01_000_removeblanks' ) ; - is( '2010_08_24_14_01_01_234_poup.txt', logfile( 1_282_658_461.2347, 'poup' ), + is( '2010_08_24_14_01_01_234_poup.txt', logfile( 1_282_658_461.2347, 'poup' ), 'logfile: 1_282_658_461.2347 poup => 2010_08_24_14_01_01_234_poup.txt' ) ; - is( 'dirdir/2010_08_24_14_01_01_234_poup.txt', logfile( 1_282_658_461.2347, 'poup', 'dirdir' ), + is( 'dirdir/2010_08_24_14_01_01_234_poup.txt', logfile( 1_282_658_461.2347, 'poup', 'dirdir' ), 'logfile: 1_282_658_461.2347 poup dirdir => dirdir/2010_08_24_14_01_01_234_poup.txt' ) ; @@ -10959,7 +13647,8 @@ sub tests_logfile { } -sub logfile { +sub logfile +{ my ( $time, $suffix, $dir ) = @_ ; $time ||= 0 ; @@ -10979,7 +13668,8 @@ sub logfile { -sub tests_move_slash { +sub tests_move_slash +{ note( 'Entering tests_move_slash()' ) ; is( undef, move_slash( ), 'move_slash: no parameters => undef' ) ; @@ -10989,7 +13679,8 @@ sub tests_move_slash { return ; } -sub move_slash { +sub move_slash +{ my $string = shift ; if ( ! defined $string ) { return ; } @@ -11002,21 +13693,22 @@ sub move_slash { -sub tests_million_folders_baby_2 { +sub tests_million_folders_baby_2 +{ note( 'Entering tests_million_folders_baby_2()' ) ; my %long ; @long{ 1 .. 900_000 } = (1) x 900_000 ; - #myprint( %long, "\n" ) ; + #myprint( %long, "\n" ) ; my $pasglop = 0 ; foreach my $elem ( 1 .. 900_000 ) { - #$debug and myprint( "$elem " ) ; + #$debug and myprint( "$elem " ) ; if ( not exists $long{ $elem } ) { $pasglop++ ; } } ok( 0 == $pasglop, 'tests_million_folders_baby_2: search among 900_000' ) ; - # myprint( "$pasglop\n" ) ; + # myprint( "$pasglop\n" ) ; note( 'Leaving tests_million_folders_baby_2()' ) ; return ; @@ -11024,7 +13716,8 @@ sub tests_million_folders_baby_2 { -sub tests_always_fail { +sub tests_always_fail +{ note( 'Entering tests_always_fail()' ) ; is( 0, 1, 'always_fail: 0 is 1' ) ; @@ -11033,31 +13726,87 @@ sub tests_always_fail { return ; } -sub logfileprepa { - my $logfile = shift ; - my $dirname = dirname( $logfile ) ; - do_valid_directory( $dirname ) || return( 0 ) ; - return( 1 ) ; +sub tests_logfileprepa +{ + note( 'Entering tests_logfileprepa()' ) ; + + is( undef, logfileprepa( ), 'logfileprepa: no args => undef' ) ; + my $logfile = 'W/tmp/tests/tests_logfileprepa.txt' ; + is( 1, logfileprepa( $logfile ), 'logfileprepa: W/tmp/tests/tests_logfileprepa.txt => 1' ) ; + + note( 'Leaving tests_logfileprepa()' ) ; + return ; } -sub teelaunch { +sub logfileprepa +{ + my $logfile = shift ; + + if ( ! 
defined( $logfile ) ) + { + return ; + }else + { + #myprint( "[$logfile]\n" ) ; + my $dirname = dirname( $logfile ) ; + do_valid_directory( $dirname ) || return( 0 ) ; + return( 1 ) ; + } +} + + +sub tests_teelaunch +{ + note( 'Entering tests_teelaunch()' ) ; + + is( undef, teelaunch( ), 'teelaunch: no args => undef' ) ; + my $mysync = {} ; + is( undef, teelaunch( $mysync ), 'teelaunch: arg empty {} => undef' ) ; + $mysync->{logfile} = '' ; + is( undef, teelaunch( $mysync ), 'teelaunch: logfile empty string => undef' ) ; + $mysync->{logfile} = 'W/tmp/tests/tests_teelaunch.txt' ; + isa_ok( my $tee = teelaunch( $mysync ), 'IO::Tee' , 'teelaunch: logfile W/tmp/tests/tests_teelaunch.txt' ) ; + is( 1, print( $tee "Hi!\n" ), 'teelaunch: write Hi!') ; + is( "Hi!\n", file_to_string( 'W/tmp/tests/tests_teelaunch.txt' ), 'teelaunch: reading W/tmp/tests/tests_teelaunch.txt is Hi!\n' ) ; + is( 1, print( $tee "Hoo\n" ), 'teelaunch: write Hoo') ; + is( "Hi!\nHoo\n", file_to_string( 'W/tmp/tests/tests_teelaunch.txt' ), 'teelaunch: reading W/tmp/tests/tests_teelaunch.txt is Hi!\nHoo\n' ) ; + + note( 'Leaving tests_teelaunch()' ) ; + return ; +} + +sub teelaunch +{ my $mysync = shift ; + + if ( ! defined( $mysync ) ) + { + return ; + } + my $logfile = $mysync->{logfile} ; + + if ( ! $logfile ) + { + return ; + } + logfileprepa( $logfile ) || croak "Error no valid directory to write log file $logfile : $OS_ERROR" ; + # This is a log file opened during the whole sync + ## no critic (InputOutput::RequireBriefOpen) open my $logfile_handle, '>', $logfile or croak( "Can not open $logfile for write: $OS_ERROR" ) ; my $tee = IO::Tee->new( $logfile_handle, \*STDOUT ) ; - #*STDERR = *$tee{IO} ; - #select $tee ; $tee->autoflush( 1 ) ; $mysync->{logfile_handle} = $logfile_handle ; $mysync->{tee} = $tee ; return $tee ; } -sub getpwuid_any_os { +sub getpwuid_any_os +{ my $uid = shift ; return( scalar getlogin ) if ( 'MSWin32' eq $OSNAME ) ; # Windows system @@ -11066,13 +13815,14 @@ sub getpwuid_any_os { } -sub simulong { +sub simulong +{ my $max_seconds = shift ; my $division = 5 ; - my $last = $division * $max_seconds ; - foreach my $i ( 1 .. ( $last ) ) { - myprint( "Are you still here $i/$last\n" ) ; - #myprint( "Are you still here $i/$last\n" . ( "Ah" x 40 . "\n") x 4000 ) ; + my $last_count = $division * $max_seconds ; + foreach my $i ( 1 .. ( $last_count ) ) { + myprint( "Are you still here $i/$last_count\n" ) ; + #myprint( "Are you still here $i/$last_count\n" . ( "Ah" x 40 . "\n") x 4000 ) ; sleep( 1 / $division ) ; } @@ -11081,20 +13831,22 @@ sub simulong { -sub printenv { +sub printenv +{ myprint( "Environment variables listing:\n", - ( map { "$_ => $ENV{$_}\n" } sort keys %ENV), - "Environment variables listing end\n" ) ; + ( map { "$_ => $ENV{$_}\n" } sort keys %ENV), + "Environment variables listing end\n" ) ; return ; } -sub testsexit { +sub testsexit +{ my $mysync = shift ; if ( ! ( $mysync->{ tests } or $mysync->{ testsdebug } or $mysync->{ testsunit } ) ) { return ; } my $test_builder = Test::More->builder ; - tests( $mysync ) ; + tests( $mysync ) ; testsdebug( $mysync ) ; testunitsession( $mysync ) ; @@ -11110,18 +13862,32 @@ sub testsexit { "List of failed tests:\n", $tests_failed ) ; exit $EXIT_TESTS_FAILED ; } - exit ; - #return ; + + cleanup_mess_from_tests( ) ; + # Cover is larger with --tests --testslive + if ( ! 
$mysync->{ testslive } ) + { + exit ; + } + # $eeee ; + return ; } -sub after_get_options { +sub cleanup_mess_from_tests +{ + undef @pipemess ; + return ; +} + +sub after_get_options +{ my $mysync = shift ; my $numopt = shift ; # exit with --help option or no option at all - $debug and myprint( "numopt:$numopt\n" ) ; - + $mysync->{ debug } and myprint( "numopt:$numopt\n" ) ; + if ( $help or not $numopt ) { myprint( usage( $mysync ) ) ; exit ; @@ -11130,21 +13896,82 @@ sub after_get_options { return ; } -sub easyany { +sub tests_remove_edging_blanks +{ + note( 'Entering tests_remove_edging_blanks()' ) ; + + is( undef, remove_edging_blanks( ), 'remove_edging_blanks: no args => undef' ) ; + is( 'abcd', remove_edging_blanks( 'abcd' ), 'remove_edging_blanks: abcd => abcd' ) ; + is( 'ab cd', remove_edging_blanks( ' ab cd ' ), 'remove_edging_blanks: " ab cd " => "ab cd"' ) ; + + note( 'Leaving tests_remove_edging_blanks()' ) ; + return ; +} + + + +sub remove_edging_blanks +{ + my $string = shift ; + if ( ! defined $string ) + { + return ; + } + $string =~ s,^ +| +$,,g ; + return $string ; +} + + +sub tests_sanitize +{ + note( 'Entering tests_remove_edging_blanks()' ) ; + + is( undef, sanitize( ), 'sanitize: no args => undef' ) ; + my $mysync = {} ; + + $mysync->{ host1 } = ' example.com ' ; + $mysync->{ user1 } = ' to to ' ; + $mysync->{ password1 } = ' sex is good! ' ; + is( undef, sanitize( $mysync ), 'sanitize: => undef' ) ; + is( 'example.com', $mysync->{ host1 }, 'sanitize: host1 " example.com " => "example.com"' ) ; + is( 'to to', $mysync->{ user1 }, 'sanitize: user1 " to to " => "to to"' ) ; + is( 'sex is good!', $mysync->{ password1 }, 'sanitize: password1 " sex is good! " => "sex is good!"' ) ; + note( 'Leaving tests_remove_edging_blanks()' ) ; + return ; +} + + +sub sanitize +{ + my $mysync = shift ; + if ( ! defined $mysync ) + { + return ; + } + + foreach my $parameter ( qw( host1 host2 user1 user2 password1 password2 ) ) + { + $mysync->{ $parameter } = remove_edging_blanks( $mysync->{ $parameter } ) ; + } + return ; +} + +sub easyany +{ my $mysync = shift ; # Gmail if ( $mysync->{gmail1} and $mysync->{gmail2} ) { - $debug and myprint( "gmail1 gmail2\n") ; + $mysync->{ debug } and myprint( "gmail1 gmail2\n") ; gmail12( $mysync ) ; return ; } if ( $mysync->{gmail1} ) { - $debug and myprint( "gmail1\n" ) ; + $mysync->{ debug } and myprint( "gmail1\n" ) ; gmail1( $mysync ) ; } if ( $mysync->{gmail2} ) { - $debug and myprint( "gmail2\n" ) ; + $mysync->{ debug } and myprint( "gmail2\n" ) ; gmail2( $mysync ) ; } # Office 365 @@ -11156,6 +13983,16 @@ sub easyany { office2( $mysync ) ; } + # Exchange + if ( $mysync->{exchange1} ) { + exchange1( $mysync ) ; + } + + if ( $mysync->{exchange2} ) { + exchange2( $mysync ) ; + } + + # Domino if ( $mysync->{domino1} ) { domino1( $mysync ) ; @@ -11165,135 +14002,185 @@ sub easyany { domino2( $mysync ) ; } - return ; } # From https://imapsync.lamiral.info/FAQ.d/FAQ.Gmail.txt -sub gmail12 { +sub gmail12 +{ my $mysync = shift ; # Gmail at host1 and host2 $mysync->{host1} ||= 'imap.gmail.com' ; $mysync->{ssl1} = ( defined $mysync->{ssl1} ) ? $mysync->{ssl1} : 1 ; $mysync->{host2} ||= 'imap.gmail.com' ; $mysync->{ssl2} = ( defined $mysync->{ssl2} ) ? 
$mysync->{ssl2} : 1 ; - $mysync->{maxbytespersecond} ||= 20_000 ; # should be 10_000 computed from by Gmail documentation + $mysync->{maxbytespersecond} ||= 20_000 ; # should be 10_000 when computed from Gmail documentation $mysync->{maxbytesafter} ||= 1_000_000_000 ; - $mysync->{automap} = ( defined $mysync->{automap} ) ? $mysync->{automap} : 1 ; - $mysync->{maxsleep} = $MAX_SLEEP ; - + $mysync->{automap} = ( defined $mysync->{automap} ) ? $mysync->{automap} : 1 ; + $mysync->{maxsleep} = ( defined $mysync->{maxsleep} ) ? $mysync->{maxsleep} : $MAX_SLEEP ; ; + $skipcrossduplicates = ( defined $skipcrossduplicates ) ? $skipcrossduplicates : 0 ; + $mysync->{ synclabels } = ( defined $mysync->{ synclabels } ) ? $mysync->{ synclabels } : 1 ; + $mysync->{ reynclabels } = ( defined $mysync->{ reynclabels } ) ? $mysync->{ reynclabels } : 1 ; push @exclude, '\[Gmail\]$' ; + push @folderlast, '[Gmail]/All Mail' ; return ; } -sub gmail1 { + +sub gmail1 +{ my $mysync = shift ; # Gmail at host2 $mysync->{host1} ||= 'imap.gmail.com' ; $mysync->{ssl1} = ( defined $mysync->{ssl1} ) ? $mysync->{ssl1} : 1 ; $mysync->{maxbytespersecond} ||= 40_000 ; # should be 20_000 computed from by Gmail documentation $mysync->{maxbytesafter} ||= 2_500_000_000 ; - $mysync->{automap} = ( defined $mysync->{automap} ) ? $mysync->{automap} : 1 ; + $mysync->{automap} = ( defined $mysync->{automap} ) ? $mysync->{automap} : 1 ; + $mysync->{maxsleep} = ( defined $mysync->{maxsleep} ) ? $mysync->{maxsleep} : $MAX_SLEEP ; ; $skipcrossduplicates = ( defined $skipcrossduplicates ) ? $skipcrossduplicates : 1 ; - $mysync->{maxsleep} = $MAX_SLEEP ; - + push @useheader, 'X-Gmail-Received', 'Message-Id' ; - push @regextrans2, 's,\[Gmail\].,,' ; - push @folderlast, '[Gmail]/All Mail' ; + push @{ $mysync->{ regextrans2 } }, 's,\[Gmail\].,,' ; + push @folderlast, '[Gmail]/All Mail' ; return ; } -sub gmail2 { +sub gmail2 +{ my $mysync = shift ; # Gmail at host2 $mysync->{host2} ||= 'imap.gmail.com' ; $mysync->{ssl2} = ( defined $mysync->{ssl2} ) ? $mysync->{ssl2} : 1 ; $mysync->{maxbytespersecond} ||= 20_000 ; # should be 10_000 computed from by Gmail documentation $mysync->{maxbytesafter} ||= 1_000_000_000 ; # In fact it is documented as half: 500_000_000 - $maxsize ||= 25_000_000 ; + #$mysync->{ maxsize } ||= 25_000_000 ; $mysync->{automap} = ( defined $mysync->{automap} ) ? $mysync->{automap} : 1 ; - $skipcrossduplicates = ( defined $skipcrossduplicates ) ? $skipcrossduplicates : 1 ; - $expunge1 = ( defined $expunge1 ) ? $expunge1 : 1 ; + #$skipcrossduplicates = ( defined $skipcrossduplicates ) ? $skipcrossduplicates : 1 ; + $mysync->{ expunge1 } = ( defined $mysync->{ expunge1 } ) ? $mysync->{ expunge1 } : 1 ; $mysync->{addheader} = ( defined $mysync->{addheader} ) ? $mysync->{addheader} : 1 ; - $mysync->{maxsleep} = $MAX_SLEEP ; - - push @exclude, '\[Gmail\]$' ; - push @useheader, 'X-Gmail-Received', 'Message-Id' ; - push @regextrans2, 's,\[Gmail\].,,' ; - push @regextrans2, 's/[ ]+/_/g' ; - push @regextrans2, q{s/['\\^"]/_/g} ; # Verified this - push @folderlast, "[Gmail]/All Mail" ; + $mysync->{maxsleep} = ( defined $mysync->{maxsleep} ) ? $mysync->{maxsleep} : $MAX_SLEEP ; ; + + $mysync->{maxsize} = ( defined $mysync->{maxsize} ) ? $mysync->{maxsize} : $GMAIL_MAXSIZE ; + + if ( ! 
$mysync->{noexclude} ) { + push @exclude, '\[Gmail\]$' ; + } + push @useheader, 'Message-Id' ; + push @{ $mysync->{ regextrans2 } }, 's,\[Gmail\].,,' ; + + # push @{ $mysync->{ regextrans2 } }, 's/[ ]+/_/g' ; # is now replaced + # by the two more specific following regexes, + # they remove just the beginning and trailing blanks, not all. + push @{ $mysync->{ regextrans2 } }, 's,^ +| +$,,g' ; + push @{ $mysync->{ regextrans2 } }, 's,/ +| +/,/,g' ; + # + push @{ $mysync->{ regextrans2 } }, q{s/['\\^"]/_/g} ; # Verified this + push @folderlast, '[Gmail]/All Mail' ; return ; } # From https://imapsync.lamiral.info/FAQ.d/FAQ.Exchange.txt -sub office1 { +sub office1 +{ # Office 365 at host1 my $mysync = shift ; - $debug and myprint( "office1 configuration\n" ) ; + output( $mysync, q{Option --office1 is like: --host1 outlook.office365.com --ssl1 --exclude "^Files$"} . "\n" ) ; + output( $mysync, "Option --office1 (cont) : unless overrided with --host1 otherhost --nossl1 --noexclude\n" ) ; $mysync->{host1} ||= 'outlook.office365.com' ; $mysync->{ssl1} = ( defined $mysync->{ssl1} ) ? $mysync->{ssl1} : 1 ; + if ( ! $mysync->{noexclude} ) { + push @exclude, '^Files$' ; + } return ; } -sub office2 { - # Office 365 at host1 - my $mysync = shift ; - $mysync->{host2} ||= 'outlook.office365.com' ; - $mysync->{ssl2} = ( defined $mysync->{ssl2} ) ? $mysync->{ssl2} : 1 ; - $maxsize ||= 45_000_000 ; - $mysync->{maxmessagespersecond} ||= 4 ; - push @regexflag, 's/\\Flagged//g' ; - $disarmreadreceipts = ( defined $disarmreadreceipts ) ? $disarmreadreceipts : 1 ; - push @regexmess, 's,(.{10500}),$1\r\n,g' ; - return ; + +sub office2 +{ + # Office 365 at host2 + my $mysync = shift ; + output( $mysync, qq{Option --office2 is like: --host2 outlook.office365.com --ssl2 --maxsize 45_000_000 --maxmessagespersecond 4\n} ) ; + output( $mysync, qq{Option --office2 (cont) : --disarmreadreceipts --regexmess "wrap 10500" --f1f2 "Files=Files_renamed_by_imapsync"\n} ) ; + output( $mysync, qq{Option --office2 (cont) : unless overrided with --host2 otherhost --nossl2 ... --nodisarmreadreceipts --noregexmess\n} ) ; + output( $mysync, qq{Option --office2 (cont) : and --nof1f2 to avoid Files folder renamed to Files_renamed_by_imapsync\n} ) ; + $mysync->{host2} ||= 'outlook.office365.com' ; + $mysync->{ssl2} = ( defined $mysync->{ssl2} ) ? $mysync->{ssl2} : 1 ; + $mysync->{ maxsize } ||= 45_000_000 ; + $mysync->{maxmessagespersecond} ||= 4 ; + #push @regexflag, 's/\\\\Flagged//g' ; # No problem without! tested 2018_09_10 + $disarmreadreceipts = ( defined $disarmreadreceipts ) ? $disarmreadreceipts : 1 ; + # I dislike double negation but here is one + if ( ! $mysync->{noregexmess} ) + { + push @regexmess, 's,(.{10500}),$1\r\n,g' ; + } + # and another... + if ( ! $mysync->{nof1f2} ) + { + push @{ $mysync->{f1f2} }, 'Files=Files_renamed_by_imapsync' ; + } + return ; } -sub exchange1 { - # Exchange 2010/2013 at host1 - - # Well nothing to do so far - return ; +sub exchange1 +{ + # Exchange 2010/2013 at host1 + my $mysync = shift ; + output( $mysync, "Option --exchange1 does nothing (except printing this line...)\n" ) ; + # Well nothing to do so far + return ; } -sub exchange2 { - # Exchange 2010/2013 at host2 - my $mysync = shift ; - $maxsize ||= 10_000_000 ; - $mysync->{maxmessagespersecond} ||= 4 ; - push @regexflag, 's/\\Flagged//g' ; - $disarmreadreceipts = ( defined $disarmreadreceipts ) ? 
$disarmreadreceipts : 1 ; - push @regexmess, 's,(.{10500}),$1\r\n,g' ; - return ; +sub exchange2 +{ + # Exchange 2010/2013 at host2 + my $mysync = shift ; + output( $mysync, "Option --exchange2 is like: --maxsize 10_000_000 --maxmessagespersecond 4 --disarmreadreceipts\n" ) ; + output( $mysync, "Option --exchange2 (cont) : --regexflag del Flagged --regexmess wrap 10500\n" ) ; + output( $mysync, "Option --exchange2 (cont) : unless overrided with --maxsize xxx --nodisarmreadreceipts --noregexflag --noregexmess\n" ) ; + $mysync->{ maxsize } ||= 10_000_000 ; + $mysync->{maxmessagespersecond} ||= 4 ; + $disarmreadreceipts = ( defined $disarmreadreceipts ) ? $disarmreadreceipts : 1 ; + # I dislike double negation but here are two + if ( ! $mysync->{noregexflag} ) { + push @regexflag, 's/\\\\Flagged//g' ; + } + if ( ! $mysync->{noregexmess} ) { + push @regexmess, 's,(.{10500}),$1\r\n,g' ; + } + return ; } -sub domino1 { +sub domino1 +{ # Domino at host1 my $mysync = shift ; - $sep1 = q{\\} ; + $mysync->{ sep1 } = q{\\} ; $prefix1 = q{} ; $messageidnodomain = ( defined $messageidnodomain ) ? $messageidnodomain : 1 ; return ; } -sub domino2 { +sub domino2 +{ # Domino at host1 my $mysync = shift ; - $sep2 = q{\\} ; + $mysync->{ sep2 } = q{\\} ; $prefix2 = q{} ; $messageidnodomain = ( defined $messageidnodomain ) ? $messageidnodomain : 1 ; - push @regextrans2, 's,^Inbox\\\\(.*),$1,i' ; + push @{ $mysync->{ regextrans2 } }, 's,^Inbox\\\\(.*),$1,i' ; return ; } -sub tests_resolv { +sub tests_resolv +{ note( 'Entering tests_resolv()' ) ; - + # is( , resolv( ), 'resolv: => ' ) ; is( undef, resolv( ), 'resolv: no args => undef' ) ; is( undef, resolv( '' ), 'resolv: empty string => undef' ) ; @@ -11301,7 +14188,7 @@ sub tests_resolv { is( '127.0.0.1', resolv( '127.0.0.1' ), 'resolv: 127.0.0.1 => 127.0.0.1' ) ; is( '127.0.0.1', resolv( 'localhost' ), 'resolv: localhost => 127.0.0.1' ) ; is( '5.135.158.182', resolv( 'imapsync.lamiral.info' ), 'resolv: imapsync.lamiral.info => 5.135.158.182' ) ; - + # ip6-localhost ( in /etc/hosts ) is( '::1', resolv( 'ip6-localhost' ), 'resolv: ip6-localhost => ::1' ) ; is( '::1', resolv( '::1' ), 'resolv: ::1 => ::1' ) ; @@ -11311,64 +14198,67 @@ sub tests_resolv { # ks3 is( '2001:41d0:8:bebd::1', resolv( '2001:41d0:8:bebd::1' ), 'resolv: 2001:41d0:8:bebd::1 => 2001:41d0:8:bebd::1' ) ; is( '2001:41d0:8:bebd::1', resolv( 'ks3ipv6.lamiral.info' ), 'resolv: ks3ipv6.lamiral.info => 2001:41d0:8:bebd::1' ) ; - - + + note( 'Leaving tests_resolv()' ) ; return ; } -sub resolv { +sub resolv +{ my $host = shift @ARG ; - + if ( ! $host ) { return ; } my $addr ; if ( defined &Socket::getaddrinfo ) { $addr = resolv_with_getaddrinfo( $host ) ; return( $addr ) ; } - - - + + + my $iaddr = inet_aton( $host ) ; if ( ! $iaddr ) { return ; } $addr = inet_ntoa( $iaddr ) ; - + return $addr ; } -sub resolv_with_getaddrinfo { +sub resolv_with_getaddrinfo +{ my $host = shift @ARG ; - + if ( ! 
$host ) { return ; } - my ( $err, @res ) = Socket::getaddrinfo( $host, "", { socktype => Socket::SOCK_RAW } ) ; - if ( $err ) { - myprint( "Cannot getaddrinfo of $host: $err\n" ) ; + my ( $err_getaddrinfo, @res ) = Socket::getaddrinfo( $host, "", { socktype => Socket::SOCK_RAW } ) ; + if ( $err_getaddrinfo ) { + myprint( "Cannot getaddrinfo of $host: $err_getaddrinfo\n" ) ; return ; } my @addr ; while( my $ai = shift @res ) { - my ( $err, $ipaddr ) = Socket::getnameinfo( $ai->{addr}, Socket::NI_NUMERICHOST(), Socket::NIx_NOSERV() ) ; - if ( $err ) { - myprint( "Cannot getnameinfo of $host: $err\n" ) ; + my ( $err_getnameinfo, $ipaddr ) = Socket::getnameinfo( $ai->{addr}, Socket::NI_NUMERICHOST(), Socket::NIx_NOSERV() ) ; + if ( $err_getnameinfo ) { + myprint( "Cannot getnameinfo of $host: $err_getnameinfo\n" ) ; return ; } - $debug and myprint( "$host => $ipaddr\n" ) ; + $sync->{ debug } and myprint( "$host => $ipaddr\n" ) ; push @addr, $ipaddr ; - - my ( $err_r, $reverse ) = Socket::getnameinfo( $ai->{addr}, 0, Socket::NIx_NOSERV() ) ; - $debug and myprint( "$host => $ipaddr => $reverse\n" ) ; + my $reverse ; + ( $err_getnameinfo, $reverse ) = Socket::getnameinfo( $ai->{addr}, 0, Socket::NIx_NOSERV() ) ; + $sync->{ debug } and myprint( "$host => $ipaddr => $reverse\n" ) ; } - + return $addr[0] ; } -sub tests_resolvrev { +sub tests_resolvrev +{ note( 'Entering tests_resolvrev()' ) ; - + # is( , resolvrev( ), 'resolvrev: => ' ) ; is( undef, resolvrev( ), 'resolvrev: no args => undef' ) ; is( undef, resolvrev( '' ), 'resolvrev: empty string => undef' ) ; @@ -11376,7 +14266,7 @@ sub tests_resolvrev { is( 'localhost', resolvrev( '127.0.0.1' ), 'resolvrev: 127.0.0.1 => localhost' ) ; is( 'localhost', resolvrev( 'localhost' ), 'resolvrev: localhost => localhost' ) ; is( 'ks.lamiral.info', resolvrev( 'imapsync.lamiral.info' ), 'resolvrev: imapsync.lamiral.info => ks.lamiral.info' ) ; - + # ip6-localhost ( in /etc/hosts ) is( 'ip6-localhost', resolvrev( 'ip6-localhost' ), 'resolvrev: ip6-localhost => ip6-localhost' ) ; is( 'ip6-localhost', resolvrev( '::1' ), 'resolvrev: ::1 => ip6-localhost' ) ; @@ -11386,28 +14276,30 @@ sub tests_resolvrev { # ks3 is( 'ks3ipv6.lamiral.info', resolvrev( '2001:41d0:8:bebd::1' ), 'resolvrev: 2001:41d0:8:bebd::1 => ks3ipv6.lamiral.info' ) ; is( 'ks3ipv6.lamiral.info', resolvrev( 'ks3ipv6.lamiral.info' ), 'resolvrev: ks3ipv6.lamiral.info => ks3ipv6.lamiral.info' ) ; - - + + note( 'Leaving tests_resolvrev()' ) ; return ; } -sub resolvrev { +sub resolvrev +{ my $host = shift @ARG ; - + if ( ! $host ) { return ; } if ( defined &Socket::getaddrinfo ) { my $name = resolvrev_with_getaddrinfo( $host ) ; return( $name ) ; } - + return ; } -sub resolvrev_with_getaddrinfo { +sub resolvrev_with_getaddrinfo +{ my $host = shift @ARG ; - + if ( ! 
$host ) { return ; } my ( $err, @res ) = Socket::getaddrinfo( $host, "", { socktype => Socket::SOCK_RAW } ) ; @@ -11423,16 +14315,17 @@ sub resolvrev_with_getaddrinfo { myprint( "Cannot getnameinfo of $host: $err\n" ) ; return ; } - $debug and myprint( "$host => $reverse\n" ) ; + $sync->{ debug } and myprint( "$host => $reverse\n" ) ; push @name, $reverse ; } - + return $name[0] ; } -sub tests_imapsping { +sub tests_imapsping +{ note( 'Entering tests_imapsping()' ) ; is( undef, imapsping( ), 'imapsping: no args => undef' ) ; @@ -11443,12 +14336,14 @@ sub tests_imapsping { return ; } -sub imapsping { +sub imapsping +{ my $host = shift ; return tcpping( $host, $IMAP_SSL_PORT ) ; } -sub tests_tcpping { +sub tests_tcpping +{ note( 'Entering tests_tcpping()' ) ; is( undef, tcpping( ), 'tcpping: no args => undef' ) ; @@ -11472,7 +14367,8 @@ sub tests_tcpping { return ; } -sub tcpping { +sub tcpping +{ if ( 2 != scalar( @ARG ) ) { return ; } @@ -11489,8 +14385,8 @@ sub tcpping { $p->hires( 1 ) ; my ($ping_ok, $rtt, $ip ) = $p->ping( $host, $mytimeout ) ; if ( ! defined $ping_ok ) { return ; } - my $rtt_approx = sprintf( "%.3f", $rtt ) ; - $debug and myprint( "Host $host timeout $mytimeout port $port ok $ping_ok ip $ip acked in $rtt_approx s\n" ) ; + my $rtt_approx = sprintf( "%.3f", $rtt ) ; + $sync->{ debug } and myprint( "Host $host timeout $mytimeout port $port ok $ping_ok ip $ip acked in $rtt_approx s\n" ) ; $p->close( ) ; if( $ping_ok ) { return 1 ; @@ -11499,7 +14395,8 @@ sub tcpping { } } -sub tests_sslcheck { +sub tests_sslcheck +{ note( 'Entering tests_sslcheck()' ) ; my $mysync ; @@ -11537,7 +14434,7 @@ sub tests_sslcheck { host1 => 'imapsync.lamiral.info', host2 => 'test2.lamiral.info', } ; - + is( 2, sslcheck( $mysync ), 'sslcheck: imapsync.lamiral.info + test2.lamiral.info => 2' ) ; $mysync = { @@ -11546,21 +14443,22 @@ sub tests_sslcheck { host2 => 'test2.lamiral.info', tls1 => 1, } ; - + is( 1, sslcheck( $mysync ), 'sslcheck: imapsync.lamiral.info + test2.lamiral.info + tls1 => 1' ) ; - + note( 'Leaving tests_sslcheck()' ) ; return ; } -sub sslcheck { +sub sslcheck +{ my $mysync = shift ; if ( ! $mysync->{sslcheck} ) { return ; } my $nb_on = 0 ; - $debug and myprint( "sslcheck\n" ) ; + $mysync->{ debug } and myprint( "sslcheck\n" ) ; if ( ( ! 
defined $mysync->{port1} ) and @@ -11602,30 +14500,33 @@ sub sslcheck { } -sub testslive { +sub testslive +{ my $mysync = shift ; - $mysync->{host1} = 'test1.lamiral.info' ; - $mysync->{user1} = 'test1' ; - $mysync->{password1} = 'secret1' ; - $mysync->{host2} = 'test2.lamiral.info' ; - $mysync->{user2} = 'test2' ; - $mysync->{password2} ='secret2' ; + $mysync->{host1} ||= 'test1.lamiral.info' ; + $mysync->{user1} ||= 'test1' ; + $mysync->{password1} ||= 'secret1' ; + $mysync->{host2} ||= 'test2.lamiral.info' ; + $mysync->{user2} ||= 'test2' ; + $mysync->{password2} ||= 'secret2' ; return ; } -sub testslive6 { +sub testslive6 +{ my $mysync = shift ; - $mysync->{host1} = 'ks2ipv6.lamiral.info' ; - $mysync->{user1} = 'test1' ; - $mysync->{password1} = 'secret1' ; - $mysync->{host2} = 'ks2ipv6.lamiral.info' ; - $mysync->{user2} = 'test2' ; - $mysync->{password2} ='secret2' ; + $mysync->{host1} ||= 'ks2ipv6.lamiral.info' ; + $mysync->{user1} ||= 'test1' ; + $mysync->{password1} ||= 'secret1' ; + $mysync->{host2} ||= 'ks2ipv6.lamiral.info' ; + $mysync->{user2} ||= 'test2' ; + $mysync->{password2} ||= 'secret2' ; return ; } -sub tests_backslash_caret { +sub tests_backslash_caret +{ note( 'Entering tests_backslash_caret()' ) ; is( "lalala", backslash_caret( "lalala" ), 'backslash_caret: lalala => lalala' ) ; @@ -11643,56 +14544,62 @@ sub tests_backslash_caret { return ; } -sub backslash_caret { +sub backslash_caret +{ my $string = shift ; - + $string =~ s{\\ $ }{^}gxms ; return $string ; } -sub tests_split_around_equal { +sub tests_split_around_equal +{ note( 'Entering tests_split_around_equal()' ) ; + is( undef, split_around_equal( ), 'split_around_equal: no args => undef' ) ; is_deeply( { toto => 'titi' }, { split_around_equal( 'toto=titi' ) }, 'split_around_equal: toto=titi => toto => titi' ) ; is_deeply( { A => 'B', C => 'D' }, { split_around_equal( 'A=B=C=D' ) }, 'split_around_equal: toto=titi => toto => titi' ) ; is_deeply( { A => 'B', C => 'D' }, { split_around_equal( 'A=B', 'C=D' ) }, 'split_around_equal: A=B C=D => A => B, C=>D' ) ; - + note( 'Leaving tests_split_around_equal()' ) ; return ; } -sub split_around_equal { +sub split_around_equal +{ if ( ! 
@ARG ) { return ; } ; return map { split /=/mxs, $_ } @ARG ; - + } -sub tests_sig_install { +sub tests_sig_install +{ note( 'Entering tests_sig_install()' ) ; - my $mysync ; + + my $mysync ; is( undef, sig_install( ), 'sig_install: no args => undef' ) ; is( undef, sig_install( $mysync ), 'sig_install: arg undef => undef' ) ; $mysync = { } ; is( undef, sig_install( $mysync ), 'sig_install: empty hash => undef' ) ; - + SKIP: { Readonly my $SKIP_15 => 15 ; if ( 'MSWin32' eq $OSNAME ) { skip( 'Tests only for Unix', $SKIP_15 ) ; } # Default to ignore USR1 USR2 in case future install fails local $SIG{ USR1 } = local $SIG{ USR2 } = sub { } ; kill( 'USR1', $PROCESS_ID ) ; - + $mysync->{ debugsig } = 1 ; - # Assign USR1 to call sub tototo + # Assign USR1 to call sub tototo # Surely a better value than undef should be returned when doing real signal stuff is( undef, sig_install( $mysync, \&tototo, 'USR1' ), 'sig_install: USR1 tototo' ) ; is( 1, kill( 'USR1', $PROCESS_ID ), 'sig_install: kill USR1 myself 1' ) ; is( 1, $mysync->{ tototo_calls }, 'sig_install: tototo call nb 1' ) ; - - # Assign USR2 to call sub tototo + + # Assign USR2 to call sub tototo is( undef, sig_install( $mysync, \&tototo, 'USR2' ), 'sig_install: USR2 tototo' ) ; is( 1, kill( 'USR2', $PROCESS_ID ), 'sig_install: kill USR2 myself 1' ) ; @@ -11701,87 +14608,104 @@ sub tests_sig_install { is( 1, kill( 'USR1', $PROCESS_ID ), 'sig_install: kill USR1 myself 2' ) ; is( 3, $mysync->{ tototo_calls }, 'sig_install: tototo call nb 3' ) ; - + local $SIG{ USR1 } = local $SIG{ USR2 } = sub { } ; is( 1, kill( 'USR1', $PROCESS_ID ), 'sig_install: kill USR1 myself 3' ) ; is( 3, $mysync->{ tototo_calls }, 'sig_install: tototo call still nb 3' ) ; - - # Assign USR1 + USR2 to call sub tototo + + # Assign USR1 + USR2 to call sub tototo is( undef, sig_install( $mysync, \&tototo, 'USR1', 'USR2' ), 'sig_install: USR1 USR2 tototo' ) ; is( 1, kill( 'USR1', $PROCESS_ID ), 'sig_install: kill USR1 myself 4' ) ; is( 4, $mysync->{ tototo_calls }, 'sig_install: tototo call now nb 4' ) ; - + is( 1, kill( 'USR2', $PROCESS_ID ), 'sig_install: kill USR1 myself 2' ) ; is( 5, $mysync->{ tototo_calls }, 'sig_install: tototo call now nb 5' ) ; } - - note( 'Leaving tests_sig_install()' ) ; + + note( 'Leaving tests_sig_install()' ) ; return ; } - -sub sig_install { +# +sub sig_install +{ my $mysync = shift ; if ( ! $mysync ) { return ; } my $mysub = shift ; if ( ! 
$mysub ) { return ; } - + my @signals = @ARG ; - - $mysync->{ debugsig } and myprint( "In sig_install with $mysync and $mysub\n" ) ; - + + $mysync->{ debugsig } and myprint( "In sig_install with $mysync and $mysub\n" ) ; + my $subsignal = sub { my $signame = shift ; - $mysync->{ debugsig } and myprint( "In subsignal with $signame and $mysync\n" ) ; + $mysync->{ debugsig } and myprint( "In subsignal with $signame and $mysync\n" ) ; &$mysub( $mysync, $signame ) ; } ; foreach my $signal ( @signals ) { $mysync->{ debugsig } and myprint( "Installing signal $signal for $subsignal\n") ; output( $mysync, "kill -$signal $PROCESS_ID # special behavior\n" ) ; + ## no critic (RequireLocalizedPunctuationVars) $SIG{ $signal } = $subsignal ; } return ; } -sub tototo { +sub tototo +{ my $mysync = shift ; myprint("In tototo with @ARG\n" ) ; $mysync->{ tototo_calls } += 1 ; return ; } -sub tests_toggle_sleep { +sub mygetppid +{ + if ( 'MSWin32' eq $OSNAME ) { + return( 'unknown under MSWin32 (too complicated)' ) ; + } else { + # Unix + return( getppid( ) ) ; + } +} + + + +sub tests_toggle_sleep +{ note( 'Entering tests_toggle_sleep()' ) ; + is( undef, toggle_sleep( ), 'toggle_sleep: no args => undef' ) ; my $mysync ; is( undef, toggle_sleep( $mysync ), 'toggle_sleep: undef => undef' ) ; $mysync = { } ; is( undef, toggle_sleep( $mysync ), 'toggle_sleep: no maxsleep => undef' ) ; - + $mysync->{maxsleep} = 3 ; is( 0, toggle_sleep( $mysync ), 'toggle_sleep: 3 => 0' ) ; - + is( $MAX_SLEEP, toggle_sleep( $mysync ), "toggle_sleep: 0 => $MAX_SLEEP" ) ; is( 0, toggle_sleep( $mysync ), "toggle_sleep: $MAX_SLEEP => 0" ) ; is( $MAX_SLEEP, toggle_sleep( $mysync ), "toggle_sleep: 0 => $MAX_SLEEP" ) ; is( 0, toggle_sleep( $mysync ), "toggle_sleep: $MAX_SLEEP => 0" ) ; - + SKIP: { Readonly my $SKIP_9 => 9 ; if ( 'MSWin32' eq $OSNAME ) { skip( 'Tests only for Unix', $SKIP_9 ) ; } # Default to ignore USR1 USR2 in case future install fails local $SIG{ USR1 } = sub { } ; kill( 'USR1', $PROCESS_ID ) ; - + $mysync->{ debugsig } = 1 ; - # Assign USR1 to call sub toggle_sleep + # Assign USR1 to call sub toggle_sleep is( undef, sig_install( $mysync, \&toggle_sleep, 'USR1' ), 'toggle_sleep: install USR1 toggle_sleep' ) ; - + $mysync->{maxsleep} = 4 ; is( 1, kill( 'USR1', $PROCESS_ID ), 'toggle_sleep: kill USR1 myself' ) ; is( 0, $mysync->{ maxsleep }, 'toggle_sleep: toggle_sleep called => sleeps are 0s' ) ; @@ -11795,28 +14719,30 @@ sub tests_toggle_sleep { is( 1, kill( 'USR1', $PROCESS_ID ), 'toggle_sleep: kill USR1 myself again' ) ; is( $MAX_SLEEP, $mysync->{ maxsleep }, "toggle_sleep: toggle_sleep called => sleeps are ${MAX_SLEEP}s" ) ; } - + note( 'Leaving tests_toggle_sleep()' ) ; return ; } -sub toggle_sleep { +sub toggle_sleep +{ my $mysync = shift ; - + myprint("In toggle_sleep with @ARG\n" ) ; if ( !defined( $mysync ) ) { return ; } if ( !defined( $mysync->{maxsleep} ) ) { return ; } - + $mysync->{ maxsleep } = max( 0, $MAX_SLEEP - $mysync->{maxsleep} ) ; myprint("Resetting maxsleep to ", $mysync->{maxsleep}, "s\n" ) ; return $mysync->{maxsleep} ; } -sub mypod2usage { +sub mypod2usage +{ my $fh_pod2usage = shift ; - + pod2usage( -exitval => 'NOEXIT', -noperldoc => 1, @@ -11830,21 +14756,22 @@ sub mypod2usage { return ; } -sub usage { +sub usage +{ my $mysync = shift ; - + if ( ! 
defined $mysync ) { return ; } - + my $usage = q{} ; my $usage_from_pod ; my $usage_footer = usage_footer( $mysync ) ; # pod2usage writes on a filehandle only and I want a variable - open my $fh_pod2usage, ">", \$usage_from_pod + open my $fh_pod2usage, ">", \$usage_from_pod or do { warn $OS_ERROR ; return ; } ; mypod2usage( $fh_pod2usage ) ; close $fh_pod2usage ; - + if ( 'MSWin32' eq $OSNAME ) { $usage_from_pod = backslash_caret( $usage_from_pod ) ; } @@ -11853,7 +14780,10 @@ sub usage { return( $usage ) ; } -sub tests_usage { +sub tests_usage +{ + note( 'Entering tests_usage()' ) ; + my $usage ; like( $usage = usage( $sync ), qr/Name:/, 'usage: contains Name:' ) ; myprint( $usage ) ; @@ -11862,13 +14792,16 @@ sub tests_usage { like( $usage, qr/imapsync/, 'usage: contains imapsync' ) ; is( undef, usage( ), 'usage: no args => undef' ) ; + + note( 'Leaving tests_usage()' ) ; return ; } -sub usage_footer { +sub usage_footer +{ my $mysync = shift ; - + my $footer = q{} ; my $localhost_info = localhost_info( $mysync ) ; @@ -11876,11 +14809,11 @@ sub usage_footer { my $homepage = homepage( ) ; my $imapsync_release = $STR_use_releasecheck ; - + if ( $mysync->{releasecheck} ) { $imapsync_release = check_last_release( ) ; } - + $footer = qq{$localhost_info $rcs $imapsync_release @@ -11891,7 +14824,8 @@ $homepage -sub usage_complete { +sub usage_complete +{ # Unused, I guess this function could be deleted my $usage = <<'EOF' ; --skipheader reg : Don't take into account header keyword @@ -11909,14 +14843,15 @@ sub usage_complete { --reconnectretry2 int : same as --reconnectretry1 but for host2 --split1 int : split the requests in several parts on host1. int is the number of messages handled per request. - default is like --split1 500. + default is like --split1 100. --split2 int : same thing on host2. --nofixInboxINBOX : Don't fix Inbox INBOX mapping. EOF return( $usage ) ; } -sub myGetOptions { +sub myGetOptions +{ # Started as a copy of Luke Ross Getopt::Long::CGI # https://metacpan.org/release/Getopt-Long-CGI @@ -11926,7 +14861,7 @@ sub myGetOptions { my $mysync = shift @ARG ; my $arguments_ref = shift @ARG ; my %options = @ARG ; - + my $mycgi = $mysync->{cgi} ; if ( not under_cgi_context() ) { @@ -11936,7 +14871,7 @@ sub myGetOptions { } # We must be in CGI context now - if ( !defined( $mycgi ) ) { return ; } + if ( ! defined( $mycgi ) ) { return ; } my $badthings = 0 ; foreach my $key ( sort keys %options ) { @@ -11986,20 +14921,38 @@ sub myGetOptions { } if ( ( $3 || q{} ) eq '@' ) { @{ ${$val} } = @values ; + my @option = map +( "--$name", "$_" ), @values ; + push @{ $mysync->{ cmdcgi } }, @option ; } elsif ( ref( $val ) eq 'ARRAY' ) { @{$val} = @values ; } - else { - ${$val} = $values[0] ; + elsif ( my $value = $values[0] ) + { + ${$val} = $value ; + push @{ $mysync->{ cmdcgi } }, "--$name", $value ; + } + else + { + } } } - else { + else + { # Checkbox # Considers only --name # Should consider also --no-name and --noname - ${$val} = $mycgi->param( $name ) ? 
1 : undef ; + my $value = $mycgi->param( $name ) ; + if ( $value ) + { + ${$val} = 1 ; + push @{ $mysync->{ cmdcgi } }, "--$name" ; + } + else + { + ${$val} = undef ; + } } } if ( $badthings ) { @@ -12013,7 +14966,8 @@ sub myGetOptions { my @blabla ; # just used to check get_options_cgi() with an array -sub tests_get_options_cgi_context { +sub tests_get_options_cgi_context +{ note( 'Entering tests_get_options_cgi()' ) ; # Temporary, have to think harder about testing CGI context in command line --tests @@ -12051,7 +15005,7 @@ sub tests_get_options_cgi_context { is( 36, get_options( $mysync, ), 'get_options cgi context: QUERY_STRING => 36' ) ; is( 'test1', $mysync->{user1}, 'get_options cgi context: $mysync->{user1} => test1' ) ; #local $ENV{'QUERY_STRING'} = undef ; - + # Testing @ $mysync->{cgi} = CGI->new( 'blabla=fd1' ) ; get_options( $mysync ) ; @@ -12063,15 +15017,15 @@ sub tests_get_options_cgi_context { # Testing s@ as ref $mysync->{cgi} = CGI->new( 'folder=fd1' ) ; get_options( $mysync ) ; - is_deeply( [ 'fd1' ], $mysync->{folder}, 'get_options cgi context: $mysync->{folder} => fd1' ) ; + is_deeply( [ 'fd1' ], $mysync->{ folder }, 'get_options cgi context: $mysync->{ folder } => fd1' ) ; $mysync->{cgi} = CGI->new( 'folder=fd1&folder=fd2' ) ; get_options( $mysync ) ; - is_deeply( [ 'fd1', 'fd2' ], $mysync->{folder}, 'get_options cgi context: $mysync->{folder} => fd1, fd2' ) ; + is_deeply( [ 'fd1', 'fd2' ], $mysync->{ folder }, 'get_options cgi context: $mysync->{ folder } => fd1, fd2' ) ; # Testing % $mysync->{cgi} = CGI->new( 'f1f2h=s1=d1&f1f2h=s2=d2&f1f2h=s3=d3' ) ; get_options( $mysync ) ; - + is_deeply( { 's1' => 'd1', 's2' => 'd2', 's3' => 'd3' }, $mysync->{f1f2h}, 'get_options cgi context: f1f2h => s1=d1 s2=d2 s3=d3' ) ; @@ -12083,29 +15037,36 @@ sub tests_get_options_cgi_context { $mysync->{cgi} = CGI->new( 'host1=example.com' ) ; get_options( $mysync ) ; is( 'example.com', $mysync->{host1}, 'get_options cgi context: --host1=example.com => $mysync->{host1} => example.com' ) ; - + #myprint( Data::Dumper->Dump( [ $mysync ] ) ) ; $mysync->{cgi} = CGI->new( 'simulong=' ) ; get_options( $mysync ) ; is( undef, $mysync->{simulong}, 'get_options cgi context: --simulong= => $mysync->{simulong} => undef' ) ; - + $mysync->{cgi} = CGI->new( 'simulong' ) ; get_options( $mysync ) ; is( undef, $mysync->{simulong}, 'get_options cgi context: --simulong => $mysync->{simulong} => undef' ) ; - + $mysync->{cgi} = CGI->new( 'simulong=4' ) ; get_options( $mysync ) ; is( 4, $mysync->{simulong}, 'get_options cgi context: --simulong=4 => $mysync->{simulong} => 4' ) ; - is( undef, $mysync->{folder}, 'get_options cgi context: --simulong=4 => $mysync->{folder} => undef' ) ; + is( undef, $mysync->{ folder }, 'get_options cgi context: --simulong=4 => $mysync->{ folder } => undef' ) ; #myprint( Data::Dumper->Dump( [ $mysync ] ) ) ; - + + $mysync ={} ; + $mysync->{cgi} = CGI->new( 'justfoldersizes=on' ) ; + get_options( $mysync ) ; + is( 1, $mysync->{ justfoldersizes }, 'get_options cgi context: --justfoldersizes=1 => justfoldersizes => 1' ) ; + myprint( Data::Dumper->Dump( [ $mysync ] ) ) ; + note( 'Leaving tests_get_options_cgi_context()' ) ; return ; } -sub get_options_cgi { +sub get_options_cgi +{ # In CGI context arguments are not in @ARGV but in QUERY_STRING variable (with GET). 
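# A minimal illustrative sketch (hypothetical values, using the CGI.pm calls already
# exercised by the tests in this patch): the same options can arrive either on the
# command line or in QUERY_STRING, e.g.
#   command line :  --host1 test1.lamiral.info --user1 test1
#   CGI with GET :  QUERY_STRING='host1=test1.lamiral.info&user1=test1'
# and in CGI context they would be read back with something like
#   my $cgi   = CGI->new( $ENV{QUERY_STRING} ) ;
#   my $host1 = $cgi->param( 'host1' ) ;   # 'test1.lamiral.info'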
my $mysync = shift @ARG ; $mysync->{cgi} || return ; @@ -12144,19 +15105,28 @@ sub get_options_cgi { 'domino2' => \$mysync->{domino2}, 'f1f2=s@' => \$mysync->{f1f2}, 'f1f2h=s%' => \$mysync->{f1f2h}, - 'folder=s@' => \$mysync->{folder}, + 'folder=s@' => \$mysync->{ folder }, 'blabla=s' => \@blabla, 'testslive!' => \$mysync->{testslive}, 'testslive6!' => \$mysync->{testslive6}, 'releasecheck!' => \$mysync->{releasecheck}, 'simulong=i' => \$mysync->{simulong}, + 'debugsleep=f' => \$mysync->{debugsleep}, + 'subfolder1=s' => \$mysync->{ subfolder1 }, + 'subfolder2=s' => \$mysync->{ subfolder2 }, + 'justfolders!' => \$mysync->{ justfolders }, + 'justfoldersizes!' => \$mysync->{ justfoldersizes }, + 'delete1!' => \$mysync->{ delete1 }, + 'delete2!' => \$mysync->{ delete2 }, + 'delete2duplicates!' => \$mysync->{ delete2duplicates }, + 'tail!' => \$mysync->{tail}, -# blabla and f1f2h=s% could be removed but -# tests_get_options_cgi() should be split before +# blabla and f1f2h=s% could be removed but +# tests_get_options_cgi() should be split before # with a sub tests_myGetOptions() ) ; - $debug and output( $mysync, "get options: [$opt_ret][$numopt]\n" ) ; + $mysync->{ debug } and output( $mysync, "get options: [$opt_ret][$numopt]\n" ) ; if ( ! $opt_ret ) { return ; @@ -12164,23 +15134,24 @@ sub get_options_cgi { return $numopt ; } -sub get_options_cmd { - my $mysync = shift @ARG ; - my @arguments = @ARG ; - my $mycgi = $mysync->{cgi} ; - # final 0 is used to print usage when no option is given on command line +sub get_options_cmd +{ + my $mysync = shift @ARG ; + my @arguments = @ARG ; + my $mycgi = $mysync->{cgi} ; + # final 0 is used to print usage when no option is given on command line my $numopt = scalar @arguments || 0 ; my $argv = join "\x00", @arguments ; if ( $argv =~ m/-delete\x002/x ) { - output( $mysync, "May be you mean --delete2 instead of --delete 2\n" ) ; + output( $mysync, "May be you mean --delete2 instead of --delete 2\n" ) ; return ; } $mysync->{f1f2h} = {} ; my $opt_ret = myGetOptions( - $mysync, - \@arguments, - 'debug!' => \$debug, + $mysync, + \@arguments, + 'debug!' => \$mysync->{ debug }, 'debuglist!' => \$debuglist, 'debugcontent!' => \$debugcontent, 'debugsleep=f' => \$mysync->{debugsleep}, @@ -12193,10 +15164,11 @@ sub get_options_cmd { 'debugfolders!' => \$mysync->{debugfolders}, 'debugssl=i' => \$mysync->{debugssl}, 'debugcgi!' => \$debugcgi, - 'debugenv' => \$mysync->{debugenv}, - 'debugsig' => \$mysync->{debugsig}, + 'debugenv!' => \$mysync->{debugenv}, + 'debugsig!' => \$mysync->{debugsig}, + 'debuglabels!' 
=> \$mysync->{debuglabels}, 'simulong=i' => \$mysync->{simulong}, - 'abort' => \$mysync->{abort}, + 'abort' => \$mysync->{abort}, 'host1=s' => \$mysync->{host1}, 'host2=s' => \$mysync->{host2}, 'port1=i' => \$mysync->{port1}, @@ -12205,59 +15177,66 @@ sub get_options_cmd { 'inet6|ipv6' => \$mysync->{inet6}, 'user1=s' => \$mysync->{user1}, 'user2=s' => \$mysync->{user2}, - 'gmail1' => \$mysync->{gmail1}, - 'gmail2' => \$mysync->{gmail2}, - 'office1' => \$mysync->{office1}, - 'office2' => \$mysync->{office2}, - 'exchange1' => \$mysync->{exchange1}, - 'exchange2' => \$mysync->{exchange2}, - 'domino1' => \$mysync->{domino1}, - 'domino2' => \$mysync->{domino2}, + 'gmail1' => \$mysync->{gmail1}, + 'gmail2' => \$mysync->{gmail2}, + 'office1' => \$mysync->{office1}, + 'office2' => \$mysync->{office2}, + 'exchange1' => \$mysync->{exchange1}, + 'exchange2' => \$mysync->{exchange2}, + 'domino1' => \$mysync->{domino1}, + 'domino2' => \$mysync->{domino2}, 'domain1=s' => \$domain1, 'domain2=s' => \$domain2, 'password1=s' => \$mysync->{password1}, 'password2=s' => \$mysync->{password2}, - 'passfile1=s' => \$passfile1, - 'passfile2=s' => \$passfile2, + 'passfile1=s' => \$mysync->{ passfile1 }, + 'passfile2=s' => \$mysync->{ passfile2 }, 'authmd5!' => \$authmd5, 'authmd51!' => \$authmd51, 'authmd52!' => \$authmd52, - 'sep1=s' => \$sep1, - 'sep2=s' => \$sep2, - 'folder=s@' => \$mysync->{folder}, + 'sep1=s' => \$mysync->{ sep1 }, + 'sep2=s' => \$mysync->{ sep2 }, + 'sanitize!' => \$mysync->{ sanitize }, + 'folder=s@' => \$mysync->{ folder }, 'folderrec=s' => \@folderrec, 'include=s' => \@include, 'exclude=s' => \@exclude, + 'noexclude' => \$mysync->{noexclude}, 'folderfirst=s' => \@folderfirst, 'folderlast=s' => \@folderlast, 'prefix1=s' => \$prefix1, 'prefix2=s' => \$prefix2, - 'subfolder2=s' => \$subfolder2, - 'fixslash2!' => \$fixslash2, + 'subfolder1=s' => \$mysync->{ subfolder1 }, + 'subfolder2=s' => \$mysync->{ subfolder2 }, + 'fixslash2!' => \$mysync->{ fixslash2 }, 'fixInboxINBOX!' => \$fixInboxINBOX, - 'regextrans2=s' => \@regextrans2, + 'regextrans2=s@' => \$mysync->{ regextrans2 }, 'mixfolders!' => \$mixfolders, 'skipemptyfolders!' => \$skipemptyfolders, 'regexmess=s' => \@regexmess, + 'noregexmess' => \$mysync->{noregexmess}, 'skipmess=s' => \@skipmess, 'pipemess=s' => \@pipemess, 'pipemesscheck!' => \$pipemesscheck, 'disarmreadreceipts!' => \$disarmreadreceipts, 'regexflag=s' => \@regexflag, + 'noregexflag' => \$mysync->{noregexflag}, 'filterflags!' => \$filterflags, 'flagscase!' => \$flagscase, 'syncflagsaftercopy!' => \$syncflagsaftercopy, - 'resyncflags!' => \$mysync->{resyncflags}, - 'delete|delete1!' => \$delete1, - 'delete2!' => \$delete2, - 'delete2duplicates!' => \$delete2duplicates, + 'resyncflags!' => \$mysync->{ resyncflags }, + 'synclabels!' => \$mysync->{ synclabels }, + 'resynclabels!' => \$mysync->{ resynclabels }, + 'delete|delete1!' => \$mysync->{ delete1 }, + 'delete2!' => \$mysync->{ delete2 }, + 'delete2duplicates!' => \$mysync->{ delete2duplicates }, 'delete2folders!' => \$delete2folders, 'delete2foldersonly=s' => \$delete2foldersonly, 'delete2foldersbutnot=s' => \$delete2foldersbutnot, 'syncinternaldates!' => \$syncinternaldates, 'idatefromheader!' => \$idatefromheader, 'syncacls!' => \$syncacls, - 'maxsize=i' => \$maxsize, + 'maxsize=i' => \$mysync->{ maxsize }, 'minsize=i' => \$minsize, 'maxage=i' => \$maxage, 'minage=i' => \$minage, @@ -12267,15 +15246,15 @@ sub get_options_cmd { 'foldersizes!' => \$foldersizes, 'foldersizesatend!' => \$foldersizesatend, 'dry!' 
=> \$mysync->{dry}, - 'expunge1|expunge!' => \$expunge1, - 'expunge2!' => \$expunge2, - 'uidexpunge2!' => \$uidexpunge2, - 'subscribed!' => \$subscribed, + 'expunge1|expunge!' => \$mysync->{ expunge1 }, + 'expunge2!' => \$mysync->{ expunge2 }, + 'uidexpunge2!' => \$mysync->{ uidexpunge2 }, + 'subscribed' => \$subscribed, 'subscribe!' => \$subscribe, 'subscribeall|subscribe_all!' => \$subscribeall, 'justbanner!' => \$justbanner, - 'justfolders!'=> \$justfolders, - 'justfoldersizes!' => \$justfoldersizes, + 'justfolders!'=> \$mysync->{ justfolders }, + 'justfoldersizes!' => \$mysync->{ justfoldersizes }, 'fast!' => \$fast, 'version' => \$mysync->{version}, 'help' => \$help, @@ -12290,7 +15269,7 @@ sub get_options_cmd { 'allowsizemismatch!' => \$allowsizemismatch, 'fastio1!' => \$fastio1, 'fastio2!' => \$fastio2, - 'sslcheck!' => \$mysync->{sslcheck}, + 'sslcheck!' => \$mysync->{sslcheck}, 'ssl1!' => \$mysync->{ssl1}, 'ssl2!' => \$mysync->{ssl2}, 'ssl1_ssl_version=s' => \$mysync->{h1}->{sslargs}->{SSL_version}, @@ -12314,7 +15293,7 @@ sub get_options_cmd { 'reconnectretry2=i' => \$reconnectretry2, 'tests!' => \$mysync->{ tests }, 'testsdebug|tests_debug!' => \$mysync->{ testsdebug }, - 'testsunit=s@' => \$mysync->{testsunit}, + 'testsunit=s@' => \$mysync->{testsunit}, 'testslive!' => \$mysync->{testslive}, 'testslive6!' => \$mysync->{testslive6}, 'justlogin!' => \$mysync->{justlogin}, @@ -12332,11 +15311,11 @@ sub get_options_cmd { 'debugcache!' => \$debugcache, 'useuid!' => \$useuid, 'addheader!' => \$mysync->{addheader}, - 'exitwhenover=i' => \$exitwhenover, + 'exitwhenover=i' => \$mysync->{ exitwhenover }, 'checkselectable!' => \$mysync->{ checkselectable }, 'checkfoldersexist!' => \$mysync->{ checkfoldersexist }, 'checkmessageexists!' => \$checkmessageexists, - 'expungeaftereach!' => \$expungeaftereach, + 'expungeaftereach!' => \$mysync->{ expungeaftereach }, 'abletosearch!' => \$mysync->{abletosearch}, 'abletosearch1!' => \$mysync->{abletosearch1}, 'abletosearch2!' => \$mysync->{abletosearch2}, @@ -12349,11 +15328,12 @@ sub get_options_cmd { 'create_folder_old!' => \$create_folder_old, 'maxmessagespersecond=f' => \$mysync->{maxmessagespersecond}, 'maxbytespersecond=i' => \$mysync->{maxbytespersecond}, - 'maxbytesafter=i' => \$mysync->{maxbytesafter}, - 'maxsleep=f' => \$mysync->{maxsleep}, + 'maxbytesafter=i' => \$mysync->{maxbytesafter}, + 'maxsleep=f' => \$mysync->{maxsleep}, 'skipcrossduplicates!' => \$skipcrossduplicates, 'debugcrossduplicates!' => \$debugcrossduplicates, 'log!' => \$mysync->{log}, + 'tail!' => \$mysync->{tail}, 'logfile=s' => \$mysync->{logfile}, 'logdir=s' => \$mysync->{logdir}, 'errorsmax=i' => \$mysync->{errorsmax}, @@ -12363,27 +15343,33 @@ sub get_options_cmd { 'justautomap!' => \$mysync->{justautomap}, 'id!' => \$mysync->{id}, 'f1f2=s@' => \$mysync->{f1f2}, + 'nof1f2' => \$mysync->{nof1f2}, 'f1f2h=s%' => \$mysync->{f1f2h}, 'justfolderlists!' 
=> \$mysync->{justfolderlists}, 'delete1emptyfolders' => \$mysync->{delete1emptyfolders}, ) ; #myprint( Data::Dumper->Dump( [ $mysync ] ) ) ; - $debug and output( $mysync, "get options: [$opt_ret][$numopt]\n" ) ; + $mysync->{ debug } and output( $mysync, "get options: [$opt_ret][$numopt]\n" ) ; my $numopt_after = scalar @arguments ; #myprint( "get options: [$opt_ret][$numopt][$numopt_after]\n" ) ; if ( $numopt_after ) { - myprint( "Extra arguments found: @arguments\n", "It usually means a quoting issue in the command line\n" ) ; + myprint( + "Extra arguments found: @arguments\n", + "It usually means a quoting issue in the command line ", + "or some misspelling options.\n", + ) ; return ; } - if ( ! $opt_ret ) { - return ; - } - return $numopt ; + if ( ! $opt_ret ) { + return ; + } + return $numopt ; } - -sub tests_get_options { + +sub tests_get_options +{ note( 'Entering tests_get_options()' ) ; # CAVEAT: still setting global variables, be careful @@ -12397,45 +15383,46 @@ sub tests_get_options { # * options not known # * --delete 2 input # * number of arguments or QUERY_STRING length - my $mysync3 = { } ; - is( undef, get_options( $mysync3, qw( --noexist ) ), 'get_options: --noexist => undef' ) ; - is( undef, $mysync3->{ noexist }, 'get_options: --noexist => undef' ) ; - $mysync3 = { } ; - is( undef, get_options( $mysync3, qw( --lalala --noexist --version ) ), 'get_options: --lalala --noexist --version => undef' ) ; - is( 1, $mysync3->{ version }, 'get_options: --version => 1' ) ; - is( undef, $mysync3->{ noexist }, 'get_options: --noexist => undef' ) ; - $mysync3 = { } ; - is( 1, get_options( $mysync3, qw( --delete2 ) ), 'get_options: --delete2 => 1' ) ; - is( 1, $delete2, 'get_options: --delete2 => $delete2 = 1' ) ; - $mysync3 = { } ; - is( undef, get_options( $mysync3, qw( --delete 2 ) ), 'get_options: --delete 2 => undef' ) ; - is( undef, $delete1, 'get_options: --delete 2 => $delete1 still undef ; good!' ) ; - $mysync3 = { } ; - is( undef, get_options( $mysync3, "--delete 2" ), 'get_options: --delete 2 => undef' ) ; + my $mysync = { } ; + is( undef, get_options( $mysync, qw( --noexist ) ), 'get_options: --noexist => undef' ) ; + is( undef, $mysync->{ noexist }, 'get_options: --noexist => undef' ) ; + $mysync = { } ; + is( undef, get_options( $mysync, qw( --lalala --noexist --version ) ), 'get_options: --lalala --noexist --version => undef' ) ; + is( 1, $mysync->{ version }, 'get_options: --version => 1' ) ; + is( undef, $mysync->{ noexist }, 'get_options: --noexist => undef' ) ; + $mysync = { } ; + is( 1, get_options( $mysync, qw( --delete2 ) ), 'get_options: --delete2 => 1' ) ; + is( 1, $mysync->{ delete2 }, 'get_options: --delete2 => var delete2 = 1' ) ; + $mysync = { } ; + is( undef, get_options( $mysync, qw( --delete 2 ) ), 'get_options: --delete 2 => var undef' ) ; + is( undef, $mysync->{ delete1 }, 'get_options: --delete 2 => var still undef ; good!' 
) ; + $mysync = { } ; + is( undef, get_options( $mysync, "--delete 2" ), 'get_options: --delete 2 => undef' ) ; - is( 1, get_options( $mysync3, "--version" ), 'get_options: --version => 1' ) ; - is( 1, get_options( $mysync3, "--help" ), 'get_options: --help => 1' ) ; - - is( undef, get_options( $mysync3, qw( --noexist --version ) ), 'get_options: --debug --noexist --version => undef' ) ; - is( 1, get_options( $mysync3, qw( --version ) ), 'get_options: --version => 1' ) ; - is( undef, get_options( $mysync3, qw( extra ) ), 'get_options: extra => undef' ) ; - is( undef, get_options( $mysync3, qw( extra1 --version extra2 ) ), 'get_options: extra1 --version extra2 => undef' ) ; + is( 1, get_options( $mysync, "--version" ), 'get_options: --version => 1' ) ; + is( 1, get_options( $mysync, "--help" ), 'get_options: --help => 1' ) ; + + is( undef, get_options( $mysync, qw( --noexist --version ) ), 'get_options: --debug --noexist --version => undef' ) ; + is( 1, get_options( $mysync, qw( --version ) ), 'get_options: --version => 1' ) ; + is( undef, get_options( $mysync, qw( extra ) ), 'get_options: extra => undef' ) ; + is( undef, get_options( $mysync, qw( extra1 --version extra2 ) ), 'get_options: extra1 --version extra2 => undef' ) ; + + $mysync = { } ; + is( 2, get_options( $mysync, qw( --host1 HOST_01) ), 'get_options: --host1 HOST_01 => 1' ) ; + is( 'HOST_01', $mysync->{ host1 }, 'get_options: --host1 HOST_01 => HOST_01' ) ; + #myprint( Data::Dumper->Dump( [ $mysync ] ) ) ; - $mysync3 = { } ; - is( 2, get_options( $mysync3, qw( --host1 HOST_01) ), 'get_options: --host1 HOST_01 => 1' ) ; - is( 'HOST_01', $mysync3->{ host1 }, 'get_options: --host1 HOST_01 => HOST_01' ) ; - #myprint( Data::Dumper->Dump( [ $mysync3 ] ) ) ; - note( 'Leaving tests_get_options()' ) ; return ; } -sub get_options { +sub get_options +{ my $mysync = shift @ARG ; my @arguments = @ARG ; - #myprint( "1 mysync: ", Data::Dumper->Dump( [ $mysync ] ) ) ; + #myprint( "1 mysync: ", Data::Dumper->Dump( [ $mysync ] ) ) ; my $ret ; if ( under_cgi_context( ) ) { # CGI context @@ -12444,13 +15431,13 @@ sub get_options { # Command line context ; $ret = get_options_cmd( $mysync, @arguments ) ; } ; - #myprint( "2 mysync: ", Data::Dumper->Dump( [ $mysync ] ) ) ; + #myprint( "2 mysync: ", Data::Dumper->Dump( [ $mysync ] ) ) ; foreach my $key ( sort keys %{ $mysync } ) { if ( ! defined $mysync->{$key} ) { delete $mysync->{$key} ; next ; } - if ( 'ARRAY' eq ref( $mysync->{$key} ) + if ( 'ARRAY' eq ref( $mysync->{$key} ) and 0 == scalar( @{ $mysync->{$key} } ) ) { delete $mysync->{$key} ; } @@ -12458,14 +15445,15 @@ sub get_options { return $ret ; } -sub testunitsession { +sub testunitsession +{ my $mysync = shift ; - + if ( ! $mysync ) { return ; } if ( ! $mysync->{ testsunit } ) { return ; } my @functions = @{ $mysync->{ testsunit } } ; - + if ( ! @functions ) { return ; } SKIP: { @@ -12476,7 +15464,8 @@ sub testunitsession { return ; } -sub tests_count_0s { +sub tests_count_0s +{ note( 'Entering tests_count_zeros()' ) ; is( 0, count_0s( ), 'count_0s: no parameters => 0' ) ; is( 1, count_0s( 0 ), 'count_0s: 0 => 1' ) ; @@ -12486,17 +15475,20 @@ sub tests_count_0s { note( 'Leaving tests_count_zeros()' ) ; return ; } -sub count_0s { +sub count_0s +{ my @array = @ARG ; - + if ( ! 
@array ) { return 0 ; } my $nb_zeros = 0 ; map { $_ == 0 and $nb_zeros += 1 } @array ; return $nb_zeros ; } -sub tests_report_failures { +sub tests_report_failures +{ note( 'Entering tests_report_failures()' ) ; + is( undef, report_failures( ), 'report_failures: no parameters => undef' ) ; is( "nb 1 - first\n", report_failures( ({'ok' => 0, name => 'first'}) ), 'report_failures: "first" failed => nb 1 - first' ) ; is( q{}, report_failures( ( {'ok' => 1, name => 'first'} ) ), 'report_failures: "first" success =>' ) ; @@ -12506,11 +15498,12 @@ sub tests_report_failures { return ; } -sub report_failures { +sub report_failures +{ my @details = @ARG ; - + if ( ! @details ) { return ; } - + my $counter = 1 ; my $report = q{} ; foreach my $details ( @details ) { @@ -12524,15 +15517,19 @@ sub report_failures { } -sub tests_true { +sub tests_true +{ note( 'Entering tests_true()' ) ; + is( 1, 1, 'true: 1 is 1' ) ; note( 'Leaving tests_true()' ) ; return ; } -sub tests_testsunit { +sub tests_testsunit +{ note( 'Entering tests_testunit()' ) ; + is( undef, testsunit( ), 'testsunit: no parameters => undef' ) ; is( undef, testsunit( undef ), 'testsunit: an undef parameter => undef' ) ; is( undef, testsunit( q{} ), 'testsunit: an empty parameter => undef' ) ; @@ -12542,139 +15539,63 @@ sub tests_testsunit { return ; } -sub testsunit { +sub testsunit +{ my @functions = @ARG ; - + if ( ! @functions ) { # myprint( "testsunit warning: no argument given\n" ) ; - return ; + return ; } - + foreach my $function ( @functions ) { if ( ! $function ) { myprint( "testsunit warning: argument is empty\n" ) ; - next ; + next ; } if ( ! exists &$function ) { myprint( "testsunit warning: function $function does not exist\n" ) ; - next ; + next ; } if ( ! defined &$function ) { myprint( "testsunit warning: function $function is not defined\n" ) ; - next ; + next ; } my $function_ref = \&{ $function } ; - &$function_ref() ; + &$function_ref() ; } return ; } -sub testsdebug { - my $mysync = shift ; - if ( ! $mysync->{ testsdebug } ) { return ; } - SKIP: { - if ( ! 
$mysync->{ testsdebug } ) { - skip 'No test in normal run' ; - } - - note( 'Entering testsdebug()' ) ; - ok( ( ( not -d 'W/tmp/tests' ) or rmtree( 'W/tmp/tests/' ) ), 'testsdebug: rmtree W/tmp/tests' ) ; - #tests_bytes_display_string( ) ; - #tests_ucsecond( ) ; - #tests_mkpath( ) ; - #tests_format_for_imap_arg( ) ; - #tests_is_a_release_number( ) ; - #tests_delete1emptyfolders( ) ; - #tests_memory_consumption( ) ; - #tests_imap2_folder_name() ; - #tests_length_ref( ) ; - #tests_diff_or_NA( ) ; - #tests_match_number( ) ; - #tests_all_defined( ) ; - #tests_guess_separator( ) ; - #tests_message_for_host2( ) ; - #tests_special_from_folders_hash( ) ; - #tests_do_valid_directory( ) ; - #tests_notmatch( ) ; - #tests_match( ) ; - #tests_get_options( ) ; - #tests_rand32( ) ; - #tests_string_to_file( ) ; - #tests_hashsynclocal( ) ; - #tests_output( ) ; - #tests_output_reset_with( ) ; - #tests_output_start( ) ; - #tests_hashsync( ) ; - #tests_check_last_release( ) ; - #tests_cpu_number( ) ; - #tests_load_and_delay( ) ; - #tests_loadavg( ) ; - #tests_backtick( ) ; - #tests_firstline( ) ; - #tests_pipemess( ) ; - #tests_not_long_imapsync_version_public( ) ; - #tests_get_options_cgi( ) ; - #tests_guess_special( ) ; - ####tests_reconnect_if_needed( ) ; - #tests_reconnect_12_if_needed( ) ; - #tests_sleep_max_bytes( ) ; - #tests_file_to_string( ) ; - #tests_under_cgi_context( ) ; - #tests_umask( ) ; - #tests_umask_str( ) ; - #tests_set_umask( ) ; - #tests_createhashfileifneeded( ) ; - #tests_filter_forbidden_characters( ) ; - #tests_logfile( ) ; - #tests_setlogfile( ) ; - #tests_move_slash( ) ; - #tests_testsunit( ) ; - #tests_always_fail( ) ; - #tests_count_0s( ) ; - #tests_report_failures( ) ; - #tests_max( ) ; - #tests_min( ) ; - #tests_sleep_if_needed( ) ; - #tests_imapsping( ) ; - #tests_tcpping( ) ; - #tests_sslcheck( ) ; - #tests_resolv( ) ; - #tests_resolvrev( ) ; - #tests_connect_socket( ) ; - #tests_probe_imapssl( ) ; - #tests_mailimapclient_connect( ) ; - #tests_guess_prefix( ) ; - #tests_usage( ) ; - #tests_version_from_rcs( ) ; - #tests_mailimapclient_connect_bug( ) ; # it fails with Mail-IMAPClient <= 3.39 - #tests_backslash_caret( ) ; - tests_write_pidfile( ) ; - tests_remove_pidfile_not_running( ) ; - tests_match_a_pid_number( ) ; - note( 'Leaving testsdebug()' ) ; +sub testsdebug +{ + # Now a little obsolete since there is + # imapsync ... --testsunit "anyfunction" + my $mysync = shift ; + if ( ! $mysync->{ testsdebug } ) { return ; } + SKIP: { + if ( ! $mysync->{ testsdebug } ) { + skip 'No test in normal run' ; + } + + note( 'Entering testsdebug()' ) ; + ok( ( ( not -d 'W/tmp/tests' ) or rmtree( 'W/tmp/tests/' ) ), 'testsdebug: rmtree W/tmp/tests' ) ; + tests_check_binary_embed_all_dyn_libs( ) ; + note( 'Leaving testsdebug()' ) ; done_testing( ) ; } return ; } -sub tests_template { - note( 'Entering tests_template()' ) ; - is( undef, undef, 'template: undef is undef' ) ; - is_deeply( {}, {}, 'template: a hash is a hash' ) ; - is_deeply( [], [], 'template: an array is an array' ) ; - note( 'Leaving tests_template()' ) ; - return ; -} +sub tests +{ + my $mysync = shift ; + if ( ! $mysync->{ tests } ) { return ; } - -sub tests { - my $mysync = shift ; - if ( ! $mysync->{ tests } ) { return ; } - - SKIP: { + SKIP: { skip 'No test in normal run' if ( ! 
$mysync->{ tests } ) ; - note( 'Entering tests()' ) ; + note( 'Entering tests()' ) ; tests_folder_routines( ) ; tests_compare_lists( ) ; tests_regexmess( ) ; @@ -12797,12 +15718,54 @@ sub tests { tests_template( ) ; tests_split_around_equal( ) ; tests_toggle_sleep( ) ; + tests_labels( ) ; + tests_synclabels( ) ; + tests_uidexpunge_or_expunge( ) ; + tests_appendlimit_from_capability( ) ; + tests_maxsize_setting( ) ; + tests_mock_capability( ) ; + tests_appendlimit( ) ; + tests_capability_of( ) ; + tests_search_in_array( ) ; + tests_operators_and_exclam_precedence( ) ; + tests_teelaunch( ) ; + tests_logfileprepa( ) ; + tests_useheader_suggestion( ) ; + tests_nb_messages_in_2_not_in_1( ) ; + tests_labels_add_subfolder2( ) ; + tests_labels_remove_subfolder1( ) ; + tests_resynclabels( ) ; + tests_labels_remove_special( ) ; + tests_uniq( ) ; + tests_remove_from_requested_folders( ) ; + tests_errors_log( ) ; + tests_add_subfolder1_to_folderrec( ) ; + tests_sanitize_subfolder( ) ; + tests_remove_edging_blanks( ) ; + tests_sanitize( ) ; + tests_remove_last_char_if_is( ) ; + tests_check_binary_embed_all_dyn_libs( ) ; + tests_nthline( ) ; + tests_secondline( ) ; + tests_tail( ) ; #tests_always_fail( ) ; - done_testing( 1181 ) ; + done_testing( 1441 ) ; note( 'Leaving tests()' ) ; } return ; } +sub tests_template +{ + note( 'Entering tests_template()' ) ; + + is( undef, undef, 'template: undef is undef' ) ; + is_deeply( {}, {}, 'template: a hash is a hash' ) ; + is_deeply( [], [], 'template: an array is an array' ) ; + note( 'Leaving tests_template()' ) ; + return ; +} + + diff --git a/data/Dockerfiles/dovecot/imapsync_cron.pl b/data/Dockerfiles/dovecot/imapsync_cron.pl old mode 100755 new mode 100644 index 4fad97ab..2b61545e --- a/data/Dockerfiles/dovecot/imapsync_cron.pl +++ b/data/Dockerfiles/dovecot/imapsync_cron.pl @@ -5,11 +5,11 @@ use LockFile::Simple qw(lock trylock unlock); use Proc::ProcessTable; use Data::Dumper qw(Dumper); use IPC::Run 'run'; -use String::Util 'trim'; use File::Temp; use Try::Tiny; use sigtrap 'handler' => \&sig_handler, qw(INT TERM KILL QUIT); +sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; my $t = Proc::ProcessTable->new; my $imapsync_running = grep { $_->{cmndline} =~ /^\/usr\/bin\/perl \/usr\/local\/bin\/imapsync\s/ } @{$t->table}; if ($imapsync_running eq 1) @@ -19,11 +19,20 @@ if ($imapsync_running eq 1) } sub qqw($) { - my @values = split('(?=--)', $_[0]); + my @params = (); + my @values = split(/(?=--)/, $_[0]); foreach my $val (@values) { + my @tmpparam = split(/ /, $val, 2); + foreach my $tmpval (@tmpparam) { + if ($tmpval ne '') { + push @params, $tmpval; + } + } + } + foreach my $val (@params) { $val=trim($val); } - return @values + return @params; } $run_dir="/tmp"; @@ -101,10 +110,6 @@ while ($row = $sth->fetchrow_arrayref()) { $timeout1 = @$row[19]; $timeout2 = @$row[20]; - $is_running = $dbh->prepare("UPDATE imapsync SET is_running = 1 WHERE id = ?"); - $is_running->bind_param( 1, ${id} ); - $is_running->execute(); - if ($enc1 eq "TLS") { $enc1 = "--tls1"; } elsif ($enc1 eq "SSL") { $enc1 = "--ssl1"; } else { undef $enc1; } my $template = $run_dir . '/imapsync.XXXXXXX'; @@ -118,43 +123,53 @@ while ($row = $sth->fetchrow_arrayref()) { my $custom_params_ref = \@custom_params_a; my $generated_cmds = [ "/usr/local/bin/imapsync", - "--tmpdir", "/tmp", - "--nofoldersizes", - ($timeout1 gt "0" ? () : ('--timeout1', $timeout1)), - ($timeout2 gt "0" ? () : ('--timeout2', $timeout2)), - ($exclude eq "" ? 
() : ("--exclude", $exclude)), - ($subfolder2 eq "" ? () : ('--subfolder2', $subfolder2)), - ($maxage eq "0" ? () : ('--maxage', $maxage)), - ($maxbytespersecond eq "0" ? () : ('--maxbytespersecond', $maxbytespersecond)), - ($delete2duplicates ne "1" ? () : ('--delete2duplicates')), - ($subscribeall ne "1" ? () : ('--subscribeall')), - ($delete1 ne "1" ? () : ('--delete')), + "--tmpdir", "/tmp", + "--nofoldersizes", + ($timeout1 gt "0" ? () : ('--timeout1', $timeout1)), + ($timeout2 gt "0" ? () : ('--timeout2', $timeout2)), + ($exclude eq "" ? () : ("--exclude", $exclude)), + ($subfolder2 eq "" ? () : ('--subfolder2', $subfolder2)), + ($maxage eq "0" ? () : ('--maxage', $maxage)), + ($maxbytespersecond eq "0" ? () : ('--maxbytespersecond', $maxbytespersecond)), + ($delete2duplicates ne "1" ? () : ('--delete2duplicates')), + ($subscribeall ne "1" ? () : ('--subscribeall')), + ($delete1 ne "1" ? () : ('--delete')), ($delete2 ne "1" ? () : ('--delete2')), ($automap ne "1" ? () : ('--automap')), ($skipcrossduplicates ne "1" ? () : ('--skipcrossduplicates')), - (!defined($enc1) ? () : ($enc1)), - "--host1", $host1, - "--user1", $user1, - "--passfile1", $passfile1->filename, - "--port1", $port1, - "--host2", "localhost", - "--user2", $user2 . '*' . trim($master_user), - "--passfile2", $passfile2->filename, - '--no-modulesversion']; + (!defined($enc1) ? () : ($enc1)), + "--host1", $host1, + "--user1", $user1, + "--passfile1", $passfile1->filename, + "--port1", $port1, + "--host2", "localhost", + "--user2", $user2 . '*' . trim($master_user), + "--passfile2", $passfile2->filename, + '--no-modulesversion', + '--noreleasecheck']; try { + $is_running = $dbh->prepare("UPDATE imapsync SET is_running = 1 WHERE id = ?"); + $is_running->bind_param( 1, ${id} ); + $is_running->execute(); + run [@$generated_cmds, @$custom_params_ref], '&>', \my $stdout; - $update = $dbh->prepare("UPDATE imapsync SET returned_text = ?, last_run = NOW(), is_running = 0 WHERE id = ?"); + + $update = $dbh->prepare("UPDATE imapsync SET returned_text = ? 
WHERE id = ?"); $update->bind_param( 1, ${stdout} ); $update->bind_param( 2, ${id} ); $update->execute(); } catch { - $update = $dbh->prepare("UPDATE imapsync SET returned_text = 'Could not start or finish imapsync', last_run = NOW(), is_running = 0 WHERE id = ?"); + $update = $dbh->prepare("UPDATE imapsync SET returned_text = 'Could not start or finish imapsync' WHERE id = ?"); + $update->bind_param( 1, ${id} ); + $update->execute(); + } finally { + $update = $dbh->prepare("UPDATE imapsync SET last_run = NOW(), is_running = 0 WHERE id = ?"); $update->bind_param( 1, ${id} ); $update->execute(); - $lockmgr->unlock($lock_file); }; + } $sth->finish(); diff --git a/data/Dockerfiles/dovecot/quarantine_notify.py b/data/Dockerfiles/dovecot/quarantine_notify.py index 28a7aabe..b1af332a 100755 --- a/data/Dockerfiles/dovecot/quarantine_notify.py +++ b/data/Dockerfiles/dovecot/quarantine_notify.py @@ -83,13 +83,14 @@ def notify_rcpt(rcpt, msg_count, quarantine_acl): msg.attach(html_part) msg['To'] = str(rcpt) text = msg.as_string() - server.sendmail(msg['From'], msg['To'], text) + server.sendmail(msg['From'].encode("ascii", errors="ignore"), msg['To'], text) server.quit() for res in meta_query: query_mysql('UPDATE quarantine SET notified = 1 WHERE id = "%d"' % (res['id']), update = True) r.hset('Q_LAST_NOTIFIED', record['rcpt'], time_now) break except Exception as ex: + server.quit() print '%s' % (ex) time.sleep(3) diff --git a/data/Dockerfiles/dovecot/quota_notify.py b/data/Dockerfiles/dovecot/quota_notify.py index f5df7639..669adec2 100755 --- a/data/Dockerfiles/dovecot/quota_notify.py +++ b/data/Dockerfiles/dovecot/quota_notify.py @@ -54,7 +54,7 @@ try: msg.attach(text_part) msg.attach(html_part) msg['To'] = username - p = Popen(['/usr/local/libexec/dovecot/dovecot-lda', '-d', username, '-o', '"plugin/quota=maildir:User quota:noenforcing"'], stdout=PIPE, stdin=PIPE, stderr=STDOUT) + p = Popen(['/usr/lib/dovecot/dovecot-lda', '-d', username, '-o', '"plugin/quota=maildir:User quota:noenforcing"'], stdout=PIPE, stdin=PIPE, stderr=STDOUT) p.communicate(input=msg.as_string()) except Exception as ex: diff --git a/data/Dockerfiles/dovecot/sa-rules.sh b/data/Dockerfiles/dovecot/sa-rules.sh index 0cea240c..d208722a 100755 --- a/data/Dockerfiles/dovecot/sa-rules.sh +++ b/data/Dockerfiles/dovecot/sa-rules.sh @@ -1,25 +1,41 @@ #!/bin/bash + +# Create temp directories +[[ ! -d /tmp/sa-rules-schaal ]] && mkdir -p /tmp/sa-rules-schaal [[ ! -d /tmp/sa-rules-heinlein ]] && mkdir -p /tmp/sa-rules-heinlein -if [[ ! -f /etc/rspamd/custom/sa-rules-heinlein ]]; then + +# Hash current SA rules +if [[ ! 
-f /etc/rspamd/custom/sa-rules ]]; then HASH_SA_RULES=0 else - HASH_SA_RULES=$(cat /etc/rspamd/custom/sa-rules-heinlein | md5sum | cut -d' ' -f1) + HASH_SA_RULES=$(cat /etc/rspamd/custom/sa-rules | md5sum | cut -d' ' -f1) fi -curl --connect-timeout 15 --max-time 30 http://www.spamassassin.heinlein-support.de/$(dig txt 1.4.3.spamassassin.heinlein-support.de +short | tr -d '"').tar.gz --output /tmp/sa-rules.tar.gz -if [[ -f /tmp/sa-rules.tar.gz ]]; then - tar xfvz /tmp/sa-rules.tar.gz -C /tmp/sa-rules-heinlein - # create complete list of rules in a single file - cat /tmp/sa-rules-heinlein/*cf > /etc/rspamd/custom/sa-rules-heinlein - # Only restart rspamd-mailcow when rules changed - if [[ $(cat /etc/rspamd/custom/sa-rules-heinlein | md5sum | cut -d' ' -f1) != ${HASH_SA_RULES} ]]; then - CONTAINER_NAME=rspamd-mailcow - CONTAINER_ID=$(curl --silent --insecure https://dockerapi/containers/json | \ - jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" | \ - jq -rc "select( .name | tostring | contains(\"${CONTAINER_NAME}\")) | .id") - if [[ ! -z ${CONTAINER_ID} ]]; then - curl --silent --insecure -XPOST --connect-timeout 15 --max-time 120 https://dockerapi/containers/${CONTAINER_ID}/restart - fi +# Deploy +## Heinlein +curl --connect-timeout 15 --max-time 30 http://www.spamassassin.heinlein-support.de/$(dig txt 1.4.3.spamassassin.heinlein-support.de +short | tr -d '"').tar.gz --output /tmp/sa-rules-heinlein.tar.gz +if gzip -t /tmp/sa-rules-heinlein.tar.gz; then + tar xfvz /tmp/sa-rules-heinlein.tar.gz -C /tmp/sa-rules-heinlein + cat /tmp/sa-rules-heinlein/*cf > /etc/rspamd/custom/sa-rules +fi +## Schaal +curl --connect-timeout 15 --max-time 30 http://sa.schaal-it.net/$(dig txt 1.4.3.sa.schaal-it.net +short | tr -d '"').tar.gz --output /tmp/sa-rules-schaal.tar.gz +if gzip -t /tmp/sa-rules-schaal.tar.gz; then + tar xfvz /tmp/sa-rules-schaal.tar.gz -C /tmp/sa-rules-schaal + # Append, do not overwrite + cat /tmp/sa-rules-schaal/*cf >> /etc/rspamd/custom/sa-rules +fi + +if [[ "$(cat /etc/rspamd/custom/sa-rules | md5sum | cut -d' ' -f1)" != "${HASH_SA_RULES}" ]]; then + CONTAINER_NAME=rspamd-mailcow + CONTAINER_ID=$(curl --silent --insecure https://dockerapi/containers/json | \ + jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" | \ + jq -rc "select( .name | tostring | contains(\"${CONTAINER_NAME}\")) | .id") + if [[ ! 
-z ${CONTAINER_ID} ]]; then + curl --silent --insecure -XPOST --connect-timeout 15 --max-time 120 https://dockerapi/containers/${CONTAINER_ID}/restart fi fi -rm -rf /tmp/sa-rules-heinlein /tmp/sa-rules.tar.gz + +# Cleanup +rm -rf /tmp/sa-rules-heinlein /tmp/sa-rules-heinlein.tar.gz +rm -rf /tmp/sa-rules-schaal /tmp/sa-rules-schaal.tar.gz diff --git a/data/Dockerfiles/dovecot/supervisord.conf b/data/Dockerfiles/dovecot/supervisord.conf index 2e3026a0..2d91b55a 100644 --- a/data/Dockerfiles/dovecot/supervisord.conf +++ b/data/Dockerfiles/dovecot/supervisord.conf @@ -12,7 +12,7 @@ stderr_logfile_maxbytes=0 autostart=true [program:dovecot] -command=/usr/local/sbin/dovecot -F +command=/usr/sbin/dovecot -F autorestart=true [program:cron] diff --git a/data/Dockerfiles/dovecot/syslog-ng.conf b/data/Dockerfiles/dovecot/syslog-ng.conf index d788d3e0..b4bc7156 100644 --- a/data/Dockerfiles/dovecot/syslog-ng.conf +++ b/data/Dockerfiles/dovecot/syslog-ng.conf @@ -31,10 +31,10 @@ destination d_redis_f2b_channel { ); }; filter f_mail { facility(mail); }; -filter f_not_watchdog { not message("172\.22\.1\.248"); }; +#filter f_not_watchdog { not message("172\.22\.1\.248"); }; log { source(s_src); - filter(f_not_watchdog); +# filter(f_not_watchdog); destination(d_stdout); filter(f_mail); destination(d_redis_ui_log); diff --git a/data/Dockerfiles/dovecot/trim_logs.sh b/data/Dockerfiles/dovecot/trim_logs.sh index 2fec55d3..b916827e 100755 --- a/data/Dockerfiles/dovecot/trim_logs.sh +++ b/data/Dockerfiles/dovecot/trim_logs.sh @@ -15,4 +15,4 @@ catch_non_zero "/usr/bin/redis-cli -h redis LTRIM NETFILTER_LOG 0 __LOG_LINES__" catch_non_zero "/usr/bin/redis-cli -h redis LTRIM AUTODISCOVER_LOG 0 __LOG_LINES__" catch_non_zero "/usr/bin/redis-cli -h redis LTRIM API_LOG 0 __LOG_LINES__" catch_non_zero "/usr/bin/redis-cli -h redis LTRIM RL_LOG 0 __LOG_LINES__" - +catch_non_zero "/usr/bin/redis-cli -h redis LTRIM WATCHDOG_LOG 0 __LOG_LINES__" diff --git a/data/Dockerfiles/netfilter/Dockerfile b/data/Dockerfiles/netfilter/Dockerfile index 92eaa39f..55594de6 100644 --- a/data/Dockerfiles/netfilter/Dockerfile +++ b/data/Dockerfiles/netfilter/Dockerfile @@ -1,13 +1,16 @@ -FROM alpine:3.9 +FROM alpine:3.10 LABEL maintainer "Andre Peters " ENV XTABLES_LIBDIR /usr/lib/xtables ENV PYTHON_IPTABLES_XTABLES_VERSION 12 ENV IPTABLES_LIBDIR /usr/lib -RUN apk add -U python2 python-dev py-pip gcc musl-dev iptables ip6tables tzdata \ - && pip2 install --upgrade python-iptables==0.13.0 redis ipaddress \ - && apk del python-dev py2-pip gcc +RUN echo 'http://dl-cdn.alpinelinux.org/alpine/v3.9/main' >> /etc/apk/repositories \ + && apk add --virtual .build-deps gcc python3-dev libffi-dev openssl-dev \ + && apk add -U python3 iptables=1.6.2-r1 ip6tables=1.6.2-r1 tzdata musl-dev \ + && pip3 install --upgrade pip python-iptables redis ipaddress dnspython \ +# && pip3 install --upgrade pip python-iptables==0.13.0 redis ipaddress dnspython \ + && apk del .build-deps COPY server.py / -CMD ["python2", "-u", "/server.py"] +CMD ["python3", "-u", "/server.py"] diff --git a/data/Dockerfiles/netfilter/server.py b/data/Dockerfiles/netfilter/server.py index f43122ea..78cafc68 100644 --- a/data/Dockerfiles/netfilter/server.py +++ b/data/Dockerfiles/netfilter/server.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 import re import os @@ -6,19 +6,22 @@ import time import atexit import signal import ipaddress +from collections import Counter from random import randint from threading import Thread from threading import Lock import redis import 
json import iptc +import dns.resolver +import dns.exception while True: try: r = redis.StrictRedis(host=os.getenv('IPV4_NETWORK', '172.22.1') + '.249', decode_responses=True, port=6379, db=0) r.ping() except Exception as ex: - print '%s - trying again in 3 seconds' % (ex) + print('%s - trying again in 3 seconds' % (ex)) time.sleep(3) else: break @@ -31,13 +34,34 @@ RULES[2] = '-login: Disconnected \(auth failed, .+\): user=.*, method=.+, rip=([ RULES[3] = '-login: Aborted login \(tried to use disallowed .+\): user=.+, rip=([0-9a-f\.:]+), lip.+' RULES[4] = 'SOGo.+ Login from \'([0-9a-f\.:]+)\' for user .+ might not have worked' RULES[5] = 'mailcow UI: Invalid password for .+ by ([0-9a-f\.:]+)' -#RULES[6] = '-login: Aborted login \(no auth .+\): user=.+, rip=([0-9a-f\.:]+), lip.+' +RULES[6] = '([0-9a-f\.:]+) \"GET \/SOGo\/.* HTTP.+\" 403 .+' +#RULES[7] = '-login: Aborted login \(no auth .+\): user=.+, rip=([0-9a-f\.:]+), lip.+' + +WHITELIST = [] +BLACKLIST= [] bans = {} -log = {} + quit_now = False lock = Lock() +def log(priority, message): + tolog = {} + tolog['time'] = int(round(time.time())) + tolog['priority'] = priority + tolog['message'] = message + r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False)) + print(message) + +def logWarn(message): + log('warn', message) + +def logCrit(message): + log('crit', message) + +def logInfo(message): + log('info', message) + def refreshF2boptions(): global f2boptions global quit_now @@ -58,8 +82,8 @@ def refreshF2boptions(): try: f2boptions = {} f2boptions = json.loads(r.get('F2B_OPTIONS')) - except ValueError, e: - print 'Error loading F2B options: F2B_OPTIONS is not json' + except ValueError: + print('Error loading F2B options: F2B_OPTIONS is not json') quit_now = True if r.exists('F2B_LOG'): @@ -84,18 +108,10 @@ def mailcowChainOrder(): if item.target.name == 'MAILCOW': target_found = True if position != 0: - log['time'] = int(round(time.time())) - log['priority'] = 'crit' - log['message'] = 'Error in ' + chain.name + ' chain order, restarting container' - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print log['message'] + logCrit('Error in %s chain order, restarting container' % (chain.name)) quit_now = True if not target_found: - log['time'] = int(round(time.time())) - log['priority'] = 'crit' - log['message'] = 'Error in ' + chain.name + ' chain: MAILCOW target not found, restarting container' - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print log['message'] + logCrit('Error in %s chain: MAILCOW target not found, restarting container' % (chain.name)) quit_now = True def ban(address): @@ -106,28 +122,28 @@ def ban(address): RETRY_WINDOW = int(f2boptions['retry_window']) NETBAN_IPV4 = '/' + str(f2boptions['netban_ipv4']) NETBAN_IPV6 = '/' + str(f2boptions['netban_ipv6']) - WHITELIST = r.hgetall('F2B_WHITELIST') - ip = ipaddress.ip_address(address.decode('ascii')) + ip = ipaddress.ip_address(address) if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped: ip = ip.ipv4_mapped address = str(ip) if ip.is_private or ip.is_loopback: return - self_network = ipaddress.ip_network(address.decode('ascii')) - if WHITELIST: - for wl_key in WHITELIST: - wl_net = ipaddress.ip_network(wl_key.decode('ascii'), False) + self_network = ipaddress.ip_network(address) + + with lock: + temp_whitelist = set(WHITELIST) + + if temp_whitelist: + for wl_key in temp_whitelist: + wl_net = ipaddress.ip_network(wl_key, False) + if wl_net.overlaps(self_network): - log['time'] = int(round(time.time())) - log['priority'] = 'info' 
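
For reference, a minimal standalone sketch of the structured logging helper introduced above: each entry is timestamped, tagged with a priority, pushed onto the Redis `NETFILTER_LOG` list as JSON, and echoed to stdout. The Redis host literal below is illustrative only; server.py derives it from the `IPV4_NETWORK` environment variable.

    import json
    import time

    import redis

    # Illustrative connection; server.py builds the host from IPV4_NETWORK + '.249'.
    r = redis.StrictRedis(host='172.22.1.249', port=6379, db=0, decode_responses=True)

    def log(priority, message):
        # Timestamped, priority-tagged entry pushed onto the NETFILTER_LOG list,
        # mirroring the log()/logInfo()/logWarn()/logCrit() helpers in server.py.
        entry = {
            'time': int(round(time.time())),
            'priority': priority,
            'message': message,
        }
        r.lpush('NETFILTER_LOG', json.dumps(entry, ensure_ascii=False))
        print(message)

    def logInfo(message):
        log('info', message)

    def logWarn(message):
        log('warn', message)

    def logCrit(message):
        log('crit', message)
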
- log['message'] = 'Address %s is whitelisted by rule %s' % (self_network, wl_net) - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print 'Address %s is whitelisted by rule %s' % (self_network, wl_net) + logInfo('Address %s is whitelisted by rule %s' % (self_network, wl_net)) return - net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)).decode('ascii'), strict=False) + net = ipaddress.ip_network((address + (NETBAN_IPV4 if type(ip) is ipaddress.IPv4Address else NETBAN_IPV6)), strict=False) net = str(net) if not net in bans or time.time() - bans[net]['last_attempt'] > RETRY_WINDOW: @@ -142,11 +158,8 @@ def ban(address): active_window = time.time() - bans[net]['last_attempt'] if bans[net]['attempts'] >= MAX_ATTEMPTS: - log['time'] = int(round(time.time())) - log['priority'] = 'crit' - log['message'] = 'Banning %s' % net - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print 'Banning %s for %d minutes' % (net, BAN_TIME / 60) + cur_time = int(round(time.time())) + logCrit('Banning %s for %d minutes' % (net, BAN_TIME / 60)) if type(ip) is ipaddress.IPv4Address: with lock: chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') @@ -165,29 +178,18 @@ def ban(address): rule.target = target if rule not in chain.rules: chain.insert_rule(rule) - r.hset('F2B_ACTIVE_BANS', '%s' % net, log['time'] + BAN_TIME) + r.hset('F2B_ACTIVE_BANS', '%s' % net, cur_time + BAN_TIME) else: - log['time'] = int(round(time.time())) - log['priority'] = 'warn' - log['message'] = '%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net) - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print '%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net) + logWarn('%d more attempts in the next %d seconds until %s is banned' % (MAX_ATTEMPTS - bans[net]['attempts'], RETRY_WINDOW, net)) def unban(net): global lock - log['time'] = int(round(time.time())) - log['priority'] = 'info' - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) if not net in bans: - log['message'] = '%s is not banned, skipping unban and deleting from queue (if any)' % net - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print '%s is not banned, skipping unban and deleting from queue (if any)' % net + logInfo('%s is not banned, skipping unban and deleting from queue (if any)' % net) r.hdel('F2B_QUEUE_UNBAN', '%s' % net) return - log['message'] = 'Unbanning %s' % net - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print 'Unbanning %s' % net - if type(ipaddress.ip_network(net.decode('ascii'))) is ipaddress.IPv4Network: + logInfo('Unbanning %s' % net) + if type(ipaddress.ip_network(net)) is ipaddress.IPv4Network: with lock: chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') rule = iptc.Rule() @@ -210,17 +212,47 @@ def unban(net): if net in bans: del bans[net] +def permBan(net, unban=False): + global lock + + if type(ipaddress.ip_network(net, strict=False)) is ipaddress.IPv4Network: + with lock: + chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') + rule = iptc.Rule() + rule.src = net + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules and not unban: + logCrit('Add host/network %s to blacklist' % net) + chain.insert_rule(rule) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif rule in chain.rules and unban: + 
logCrit('Remove host/network %s from blacklist' % net) + chain.delete_rule(rule) + r.hdel('F2B_PERM_BANS', '%s' % net) + else: + with lock: + chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') + rule = iptc.Rule6() + rule.src = net + target = iptc.Target(rule, "REJECT") + rule.target = target + if rule not in chain.rules and not unban: + logCrit('Add host/network %s to blacklist' % net) + chain.insert_rule(rule) + r.hset('F2B_PERM_BANS', '%s' % net, int(round(time.time()))) + elif rule in chain.rules and unban: + logCrit('Remove host/network %s from blacklist' % net) + chain.delete_rule(rule) + r.hdel('F2B_PERM_BANS', '%s' % net) + def quit(signum, frame): global quit_now quit_now = True def clear(): global lock - log['time'] = int(round(time.time())) - log['priority'] = 'info' - log['message'] = 'Clearing all bans' - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print 'Clearing all bans' + logInfo('Clearing all bans') for net in bans.copy(): unban(net) with lock: @@ -249,28 +281,20 @@ def clear(): pubsub.unsubscribe() def watch(): - log['time'] = int(round(time.time())) - log['priority'] = 'info' - log['message'] = 'Watching Redis channel F2B_CHANNEL' - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) + logInfo('Watching Redis channel F2B_CHANNEL') pubsub.subscribe('F2B_CHANNEL') - print 'Subscribing to Redis channel F2B_CHANNEL' while not quit_now: for item in pubsub.listen(): - for rule_id, rule_regex in RULES.iteritems(): + for rule_id, rule_regex in RULES.items(): if item['data'] and item['type'] == 'message': result = re.search(rule_regex, item['data']) if result: addr = result.group(1) - ip = ipaddress.ip_address(addr.decode('ascii')) + ip = ipaddress.ip_address(addr) if ip.is_private or ip.is_loopback: continue - print '%s matched rule id %d' % (addr, rule_id) - log['time'] = int(round(time.time())) - log['priority'] = 'warn' - log['message'] = '%s matched rule id %d' % (addr, rule_id) - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) + logWarn('%s matched rule id %d' % (addr, rule_id)) ban(addr) def snat4(snat_target): @@ -294,11 +318,7 @@ def snat4(snat_target): chain = iptc.Chain(table, 'POSTROUTING') table.autocommit = False if get_snat4_rule() not in chain.rules: - log['time'] = int(round(time.time())) - log['priority'] = 'info' - log['message'] = 'Added POSTROUTING rule for source network ' + get_snat4_rule().src + ' to SNAT target ' + snat_target - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print log['message'] + logCrit('Added POSTROUTING rule for source network %s to SNAT target %s' % (get_snat4_rule().src, snat_target)) chain.insert_rule(get_snat4_rule()) table.commit() else: @@ -309,7 +329,7 @@ def snat4(snat_target): table.commit() table.autocommit = True except: - print 'Error running SNAT4, retrying...' 
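
As a rough illustration of the ban bookkeeping above: an offending address is first collapsed to a network using the configured `netban_ipv4`/`netban_ipv6` prefix length, and attempts against that network are counted within `retry_window` until `max_attempts` is reached. The sketch below shows only that counting step; the iptables/ip6tables rule handling and the F2B_OPTIONS lookup from Redis are left out, and the default parameter values are placeholders, not the real defaults.

    import time
    import ipaddress

    bans = {}

    def note_attempt(address, max_attempts=10, retry_window=600,
                     netban_ipv4='/32', netban_ipv6='/128'):
        # Collapse the offender to a network, as ban() does, and report whether
        # the attempt counter for that network has reached max_attempts.
        ip = ipaddress.ip_address(address)
        if type(ip) is ipaddress.IPv6Address and ip.ipv4_mapped:
            ip = ip.ipv4_mapped
        if ip.is_private or ip.is_loopback:
            return None, False
        prefix = netban_ipv4 if type(ip) is ipaddress.IPv4Address else netban_ipv6
        net = str(ipaddress.ip_network(str(ip) + prefix, strict=False))
        if net not in bans or time.time() - bans[net]['last_attempt'] > retry_window:
            bans[net] = {'attempts': 0, 'last_attempt': 0}
        bans[net]['attempts'] += 1
        bans[net]['last_attempt'] = time.time()
        return net, bans[net]['attempts'] >= max_attempts

    # e.g. note_attempt('8.8.8.8') -> ('8.8.8.8/32', False) on the first call
    # with the placeholder defaults above.
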
+ print('Error running SNAT4, retrying...') def snat6(snat_target): global lock @@ -332,11 +352,7 @@ def snat6(snat_target): chain = iptc.Chain(table, 'POSTROUTING') table.autocommit = False if get_snat6_rule() not in chain.rules: - log['time'] = int(round(time.time())) - log['priority'] = 'info' - log['message'] = 'Added POSTROUTING rule for source network ' + get_snat6_rule().src + ' to SNAT target ' + snat_target - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print log['message'] + logInfo('Added POSTROUTING rule for source network %s to SNAT target %s' % (get_snat6_rule().src, snat_target)) chain.insert_rule(get_snat6_rule()) table.commit() else: @@ -347,14 +363,14 @@ def snat6(snat_target): table.commit() table.autocommit = True except: - print 'Error running SNAT6, retrying...' + print('Error running SNAT6, retrying...') def autopurge(): while not quit_now: time.sleep(10) refreshF2boptions() - BAN_TIME = f2boptions['ban_time'] - MAX_ATTEMPTS = f2boptions['max_attempts'] + BAN_TIME = int(f2boptions['ban_time']) + MAX_ATTEMPTS = int(f2boptions['max_attempts']) QUEUE_UNBAN = r.hgetall('F2B_QUEUE_UNBAN') if QUEUE_UNBAN: for net in QUEUE_UNBAN: @@ -364,9 +380,101 @@ def autopurge(): if time.time() - bans[net]['last_attempt'] > BAN_TIME: unban(net) +def isIpNetwork(address): + try: + ipaddress.ip_network(address, False) + except ValueError: + return False + return True + + +def genNetworkList(list): + resolver = dns.resolver.Resolver() + hostnames = [] + networks = [] + + for key in list: + if isIpNetwork(key): + networks.append(key) + else: + hostnames.append(key) + + for hostname in hostnames: + hostname_ips = [] + for rdtype in ['A', 'AAAA']: + try: + answer = resolver.query(qname=hostname, rdtype=rdtype, lifetime=3) + except dns.exception.Timeout: + logInfo('Hostname %s timedout on resolve' % hostname) + break + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + continue + except dns.exception.DNSException as dnsexception: + logInfo('%s' % dnsexception) + continue + + for rdata in answer: + hostname_ips.append(rdata.to_text()) + + networks.extend(hostname_ips) + + return set(networks) + +def whitelistUpdate(): + global lock + global quit_now + global WHITELIST + + while not quit_now: + start_time = time.time() + list = r.hgetall('F2B_WHITELIST') + + new_whitelist = [] + + if list: + new_whitelist = genNetworkList(list) + + with lock: + if Counter(new_whitelist) != Counter(WHITELIST): + WHITELIST = new_whitelist + logInfo('Whitelist was changed, it has %s entries' % len(WHITELIST)) + + time.sleep(60.0 - ((time.time() - start_time) % 60.0)) + +def blacklistUpdate(): + global quit_now + global BLACKLIST + + while not quit_now: + start_time = time.time() + list = r.hgetall('F2B_BLACKLIST') + + new_blacklist = [] + + if list: + new_blacklist = genNetworkList(list) + + if Counter(new_blacklist) != Counter(BLACKLIST): + addban = set(new_blacklist).difference(BLACKLIST) + delban = set(BLACKLIST).difference(new_blacklist) + + BLACKLIST = new_blacklist + logInfo('Blacklist was changed, it has %s entries' % len(BLACKLIST)) + + if addban: + for net in addban: + permBan(net=net) + + if delban: + for net in delban: + permBan(net=net, unban=True) + + + time.sleep(60.0 - ((time.time() - start_time) % 60.0)) + def initChain(): # Is called before threads start, no locking - print "Initializing mailcow netfilter chain" + print("Initializing mailcow netfilter chain") # IPv4 if not iptc.Chain(iptc.Table(iptc.Table.FILTER), "MAILCOW") in iptc.Table(iptc.Table.FILTER).chains: 
iptc.Table(iptc.Table.FILTER).create_chain("MAILCOW") @@ -391,38 +499,7 @@ def initChain(): rule.target = target if rule not in chain.rules: chain.insert_rule(rule) - # Apply blacklist - BLACKLIST = r.hgetall('F2B_BLACKLIST') - if BLACKLIST: - for bl_key in BLACKLIST: - if type(ipaddress.ip_network(bl_key.decode('ascii'), strict=False)) is ipaddress.IPv4Network: - chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), 'MAILCOW') - rule = iptc.Rule() - rule.src = bl_key - target = iptc.Target(rule, "REJECT") - rule.target = target - if rule not in chain.rules: - log['time'] = int(round(time.time())) - log['priority'] = 'crit' - log['message'] = 'Blacklisting host/network %s' % bl_key - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print log['message'] - chain.insert_rule(rule) - r.hset('F2B_PERM_BANS', '%s' % bl_key, int(round(time.time()))) - else: - chain = iptc.Chain(iptc.Table6(iptc.Table6.FILTER), 'MAILCOW') - rule = iptc.Rule6() - rule.src = bl_key - target = iptc.Target(rule, "REJECT") - rule.target = target - if rule not in chain.rules: - log['time'] = int(round(time.time())) - log['priority'] = 'crit' - log['message'] = 'Blacklisting host/network %s' % bl_key - r.lpush('NETFILTER_LOG', json.dumps(log, ensure_ascii=False)) - print log['message'] - chain.insert_rule(rule) - r.hset('F2B_PERM_BANS', '%s' % bl_key, int(round(time.time()))) + if __name__ == '__main__': @@ -437,25 +514,25 @@ if __name__ == '__main__': if os.getenv('SNAT_TO_SOURCE') and os.getenv('SNAT_TO_SOURCE') is not 'n': try: - snat_ip = os.getenv('SNAT_TO_SOURCE').decode('ascii') + snat_ip = os.getenv('SNAT_TO_SOURCE') snat_ipo = ipaddress.ip_address(snat_ip) if type(snat_ipo) is ipaddress.IPv4Address: snat4_thread = Thread(target=snat4,args=(snat_ip,)) snat4_thread.daemon = True snat4_thread.start() except ValueError: - print os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address' + print(os.getenv('SNAT_TO_SOURCE') + ' is not a valid IPv4 address') if os.getenv('SNAT6_TO_SOURCE') and os.getenv('SNAT6_TO_SOURCE') is not 'n': try: - snat_ip = os.getenv('SNAT6_TO_SOURCE').decode('ascii') + snat_ip = os.getenv('SNAT6_TO_SOURCE') snat_ipo = ipaddress.ip_address(snat_ip) if type(snat_ipo) is ipaddress.IPv6Address: snat6_thread = Thread(target=snat6,args=(snat_ip,)) snat6_thread.daemon = True snat6_thread.start() except ValueError: - print os.getenv('SNAT6_TO_SOURCE') + ' is not a valid IPv6 address' + print(os.getenv('SNAT6_TO_SOURCE') + ' is not a valid IPv6 address') autopurge_thread = Thread(target=autopurge) autopurge_thread.daemon = True @@ -464,6 +541,14 @@ if __name__ == '__main__': mailcowchainwatch_thread = Thread(target=mailcowChainOrder) mailcowchainwatch_thread.daemon = True mailcowchainwatch_thread.start() + + blacklistupdate_thread = Thread(target=blacklistUpdate) + blacklistupdate_thread.daemon = True + blacklistupdate_thread.start() + + whitelistupdate_thread = Thread(target=whitelistUpdate) + whitelistupdate_thread.daemon = True + whitelistupdate_thread.start() signal.signal(signal.SIGTERM, quit) atexit.register(clear) diff --git a/data/Dockerfiles/olefy/Dockerfile b/data/Dockerfiles/olefy/Dockerfile new file mode 100644 index 00000000..01ebe9ed --- /dev/null +++ b/data/Dockerfiles/olefy/Dockerfile @@ -0,0 +1,19 @@ +FROM alpine:3.10 +LABEL maintainer "Andre Peters " + +WORKDIR /app + +#RUN addgroup -S olefy && adduser -S olefy -G olefy \ +RUN apk add --virtual .build-deps gcc python3-dev musl-dev libffi-dev openssl-dev \ + && apk add --update --no-cache python3 openssl tzdata libmagic \ 
+ && pip3 install --upgrade pip \ + && pip3 install --upgrade oletools asyncio python-magic \ + && apk del .build-deps + +ADD https://raw.githubusercontent.com/HeinleinSupport/olefy/master/olefy.py /app/ + +RUN chown -R nobody:nobody /app /tmp + +USER nobody + +CMD ["python3", "-u", "/app/olefy.py"] diff --git a/data/Dockerfiles/phpfpm/Dockerfile b/data/Dockerfiles/phpfpm/Dockerfile index de31031a..ea8502d7 100644 --- a/data/Dockerfiles/phpfpm/Dockerfile +++ b/data/Dockerfiles/phpfpm/Dockerfile @@ -1,11 +1,11 @@ -FROM php:7.3-fpm-alpine3.8 +FROM php:7.3-fpm-alpine3.10 LABEL maintainer "Andre Peters " -ENV APCU_PECL 5.1.16 -ENV IMAGICK_PECL 3.4.3 +ENV APCU_PECL 5.1.17 +ENV IMAGICK_PECL 3.4.4 #ENV MAILPARSE_PECL 3.0.2 ENV MEMCACHED_PECL 3.1.3 -ENV REDIS_PECL 4.2.0 +ENV REDIS_PECL 5.0.1 RUN apk add -U --no-cache autoconf \ bash \ @@ -53,13 +53,14 @@ RUN apk add -U --no-cache autoconf \ && docker-php-ext-enable apcu imagick memcached mailparse redis \ && pecl clear-cache \ && docker-php-ext-configure intl \ + && docker-php-ext-configure exif \ && docker-php-ext-configure gd \ --with-gd \ --enable-gd-native-ttf \ --with-freetype-dir=/usr/include/ \ --with-png-dir=/usr/include/ \ --with-jpeg-dir=/usr/include/ \ - && docker-php-ext-install -j 4 gd gettext intl ldap opcache pcntl pdo pdo_mysql soap sockets xmlrpc zip \ + && docker-php-ext-install -j 4 exif gd gettext intl ldap opcache pcntl pdo pdo_mysql soap sockets xmlrpc zip \ && docker-php-ext-configure imap --with-imap --with-imap-ssl \ && docker-php-ext-install -j 4 imap \ && apk del --purge autoconf \ diff --git a/data/Dockerfiles/phpfpm/docker-entrypoint.sh b/data/Dockerfiles/phpfpm/docker-entrypoint.sh index 76c4035e..cde365ff 100755 --- a/data/Dockerfiles/phpfpm/docker-entrypoint.sh +++ b/data/Dockerfiles/phpfpm/docker-entrypoint.sh @@ -19,29 +19,48 @@ if [[ -z $(redis-cli --raw -h redis-mailcow GET Q_RELEASE_FORMAT) ]]; then redis-cli --raw -h redis-mailcow SET Q_RELEASE_FORMAT raw fi +# Set max age of q items - if unset + +if [[ -z $(redis-cli --raw -h redis-mailcow GET Q_MAX_AGE) ]]; then + redis-cli --raw -h redis-mailcow SET Q_MAX_AGE 365 +fi + # Check of mysql_upgrade CONTAINER_ID= # Todo: Better check if upgrade failed # This can happen due to a broken sogo_view [ -s /mysql_upgrade_loop ] && SQL_LOOP_C=$(cat /mysql_upgrade_loop) -CONTAINER_ID=$(curl --silent --insecure https://dockerapi/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"mysql-mailcow\")) | .id") -if [[ ! -z "${CONTAINER_ID}" ]] && [[ "${CONTAINER_ID}" =~ [^a-zA-Z0-9] ]]; then - SQL_UPGRADE_RETURN=$(curl --silent --insecure -XPOST https://dockerapi/containers/${CONTAINER_ID}/exec -d '{"cmd":"system", "task":"mysql_upgrade"}' --silent -H 'Content-type: application/json' | jq -r .type) - if [[ ${SQL_UPGRADE_RETURN} == 'warning' ]]; then - if [ -z ${SQL_LOOP_C} ]; then - echo 1 > /mysql_upgrade_loop - echo "MySQL applied an upgrade, restarting PHP-FPM..." - exit 1 +until [[ ! 
-z "${CONTAINER_ID}" ]] && [[ "${CONTAINER_ID}" =~ ^[[:alnum:]]*$ ]]; do + CONTAINER_ID=$(curl --silent --insecure https://dockerapi/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" 2> /dev/null | jq -rc "select( .name | tostring | contains(\"mysql-mailcow\")) | .id" 2> /dev/null) +done +echo "MySQL @ ${CONTAINER_ID}" +SQL_UPGRADE_RETURN=$(curl --silent --insecure -XPOST https://dockerapi/containers/${CONTAINER_ID}/exec -d '{"cmd":"system", "task":"mysql_upgrade"}' --silent -H 'Content-type: application/json' | jq -r .type) +if [[ ${SQL_UPGRADE_RETURN} == 'warning' ]]; then + if [ -z ${SQL_LOOP_C} ]; then + echo 1 > /mysql_upgrade_loop + echo "MySQL applied an upgrade" + POSTFIX=($(curl --silent --insecure https://dockerapi/containers/json | jq -r '.[] | {name: .Config.Labels["com.docker.compose.service"], id: .Id}' | jq -rc 'select( .name | tostring | contains("postfix-mailcow")) | .id' | tr "\n" " ")) + if [[ -z ${POSTFIX} ]]; then + echo "Could not determine Postfix container ID, skipping Postfix restart." else - rm /mysql_upgrade_loop - echo "MySQL was not applied previously, skipping. Restart php-fpm-mailcow to retry or run mysql_upgrade manually." - while ! mysqladmin status --socket=/var/run/mysqld/mysqld.sock -u${DBUSER} -p${DBPASS} --silent; do - echo "Waiting for SQL to return..." - sleep 2 - done + echo "Restarting Postfix" + curl -X POST --silent --insecure https://dockerapi/containers/${POSTFIX}/restart | jq -r '.msg' + echo "Sleeping 10 seconds..." + sleep 10 fi + echo "Restarting PHP-FPM, bye" + exit 1 + else + rm /mysql_upgrade_loop + echo "MySQL was not applied previously, skipping. Restart php-fpm-mailcow to retry or run mysql_upgrade manually." + while ! mysqladmin status --socket=/var/run/mysqld/mysqld.sock -u${DBUSER} -p${DBPASS} --silent; do + echo "Waiting for SQL to return..." 
+ sleep 2 + done fi +else + echo "MySQL is up-to-date" fi # Trigger db init diff --git a/data/Dockerfiles/postfix/Dockerfile b/data/Dockerfiles/postfix/Dockerfile index 05f2c3c7..77173028 100644 --- a/data/Dockerfiles/postfix/Dockerfile +++ b/data/Dockerfiles/postfix/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:bionic +FROM debian:buster-slim LABEL maintainer "Andre Peters " ARG DEBIAN_FRONTEND=noninteractive @@ -9,12 +9,17 @@ RUN dpkg-divert --local --rename --add /sbin/initctl \ && dpkg-divert --local --rename --add /usr/bin/ischroot \ && ln -sf /bin/true /usr/bin/ischroot -RUN apt-get update && apt-get install -y --no-install-recommends \ +# Add groups and users before installing Postfix to not break compatibility +RUN groupadd -g 102 postfix \ + && groupadd -g 103 postdrop \ + && useradd -g postfix -u 101 -d /var/spool/postfix -s /usr/sbin/nologin postfix \ + && apt-get update && apt-get install -y --no-install-recommends \ ca-certificates \ curl \ dirmngr \ gnupg \ libsasl2-modules \ + mariadb-client \ perl \ postfix \ postfix-mysql \ @@ -32,18 +37,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ && printf '#!/bin/bash\n/usr/sbin/postconf -c /opt/postfix/conf "$@"' > /usr/local/sbin/postconf \ && chmod +x /usr/local/sbin/postconf -RUN addgroup --system --gid 600 zeyple \ - && adduser --system --home /var/lib/zeyple --no-create-home --uid 600 --gid 600 --disabled-login zeyple \ - && touch /var/log/zeyple.log \ - && chown zeyple: /var/log/zeyple.log \ - && mkdir -p /opt/mailman/var/data \ - && touch /opt/mailman/var/data/postfix_lmtp \ - && touch /opt/mailman/var/data/postfix_domains - -COPY zeyple.py /usr/local/bin/zeyple.py -COPY zeyple.conf /etc/zeyple.conf COPY supervisord.conf /etc/supervisor/supervisord.conf COPY syslog-ng.conf /etc/syslog-ng/syslog-ng.conf +COPY stop-supervisor.sh /usr/local/sbin/stop-supervisor.sh COPY postfix.sh /opt/postfix.sh COPY rspamd-pipe-ham /usr/local/bin/rspamd-pipe-ham COPY rspamd-pipe-spam /usr/local/bin/rspamd-pipe-spam diff --git a/data/Dockerfiles/postfix/postfix.sh b/data/Dockerfiles/postfix/postfix.sh index 6ec5cc1d..3d060e8c 100755 --- a/data/Dockerfiles/postfix/postfix.sh +++ b/data/Dockerfiles/postfix/postfix.sh @@ -4,14 +4,23 @@ trap "postfix stop" EXIT [[ ! -d /opt/postfix/conf/sql/ ]] && mkdir -p /opt/postfix/conf/sql/ +# Wait for MySQL to warm-up +while ! mysqladmin status --socket=/var/run/mysqld/mysqld.sock -u${DBUSER} -p${DBPASS} --silent; do + echo "Waiting for database to come up..." 
+ sleep 2 +done + cat < /etc/aliases +# Autogenerated by mailcow null: /dev/null +watchdog: /dev/null ham: "|/usr/local/bin/rspamd-pipe-ham" spam: "|/usr/local/bin/rspamd-pipe-spam" EOF newaliases; cat < /opt/postfix/conf/sql/mysql_relay_recipient_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -30,6 +39,7 @@ query = SELECT DISTINCT EOF cat < /opt/postfix/conf/sql/mysql_tls_policy_override_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -38,6 +48,7 @@ query = SELECT CONCAT(policy, ' ', parameters) AS tls_policy FROM tls_policy_ove EOF cat < /opt/postfix/conf/sql/mysql_tls_enforce_in_policy.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -55,6 +66,7 @@ query = SELECT IF(EXISTS( EOF cat < /opt/postfix/conf/sql/mysql_sender_dependent_default_transport_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -86,6 +98,7 @@ query = SELECT GROUP_CONCAT(transport SEPARATOR '') AS transport_maps EOF cat < /opt/postfix/conf/sql/mysql_transport_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -95,7 +108,18 @@ query = SELECT CONCAT('smtp_via_transport_maps:', nexthop) AS transport FROM tra AND destination = '%s'; EOF +cat < /opt/postfix/conf/sql/mysql_virtual_resource_maps.cf +# Autogenerated by mailcow +user = ${DBUSER} +password = ${DBPASS} +hosts = unix:/var/run/mysqld/mysqld.sock +dbname = ${DBNAME} +query = SELECT 'null@localhost' FROM mailbox + WHERE kind REGEXP 'location|thing|group' AND username = '%s'; +EOF + cat < /opt/postfix/conf/sql/mysql_sasl_passwd_maps_sender_dependent.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -104,8 +128,8 @@ query = SELECT CONCAT_WS(':', username, password) AS auth_data FROM relayhosts WHERE id IN ( SELECT relayhost FROM domain WHERE CONCAT('@', domain) = '%s' - OR '%s' IN ( - SELECT CONCAT('@', alias_domain) FROM alias_domain + OR domain IN ( + SELECT target_domain FROM alias_domain WHERE CONCAT('@', alias_domain) = '%s' ) ) AND active = '1' @@ -113,6 +137,7 @@ query = SELECT CONCAT_WS(':', username, password) AS auth_data FROM relayhosts EOF cat < /opt/postfix/conf/sql/mysql_sasl_passwd_maps_transport_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -124,18 +149,8 @@ query = SELECT CONCAT_WS(':', username, password) AS auth_data FROM transports LIMIT 1; EOF -cat < /opt/postfix/conf/sql/mysql_virtual_alias_domain_catchall_maps.cf -user = ${DBUSER} -password = ${DBPASS} -hosts = unix:/var/run/mysqld/mysqld.sock -dbname = ${DBNAME} -query = SELECT goto FROM alias, alias_domain - WHERE alias_domain.alias_domain = '%d' - AND alias.address = CONCAT('@', alias_domain.target_domain) - AND alias.active = 1 AND alias_domain.active='1' -EOF - cat < /opt/postfix/conf/sql/mysql_virtual_alias_domain_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -148,6 +163,7 @@ query = SELECT username FROM mailbox, alias_domain EOF cat < /opt/postfix/conf/sql/mysql_virtual_alias_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -158,6 +174,7 @@ query = SELECT goto FROM alias EOF cat < 
/opt/postfix/conf/sql/mysql_recipient_bcc_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -169,6 +186,7 @@ query = SELECT bcc_dest FROM bcc_maps EOF cat < /opt/postfix/conf/sql/mysql_sender_bcc_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -180,6 +198,7 @@ query = SELECT bcc_dest FROM bcc_maps EOF cat < /opt/postfix/conf/sql/mysql_recipient_canonical_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -190,6 +209,7 @@ query = SELECT new_dest FROM recipient_maps EOF cat < /opt/postfix/conf/sql/mysql_virtual_domains_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -203,6 +223,7 @@ query = SELECT alias_domain from alias_domain WHERE alias_domain='%s' AND active EOF cat < /opt/postfix/conf/sql/mysql_virtual_mailbox_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -211,6 +232,7 @@ query = SELECT CONCAT(JSON_UNQUOTE(JSON_EXTRACT(attributes, '$.mailbox_format')) EOF cat < /opt/postfix/conf/sql/mysql_virtual_relay_domain_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -219,6 +241,7 @@ query = SELECT domain FROM domain WHERE domain='%s' AND backupmx = '1' AND activ EOF cat < /opt/postfix/conf/sql/mysql_virtual_sender_acl.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -260,6 +283,7 @@ query = SELECT goto FROM alias EOF cat < /opt/postfix/conf/sql/mysql_virtual_spamalias_maps.cf +# Autogenerated by mailcow user = ${DBUSER} password = ${DBPASS} hosts = unix:/var/run/mysqld/mysqld.sock @@ -269,10 +293,11 @@ query = SELECT goto FROM spamalias AND validity >= UNIX_TIMESTAMP() EOF -# Reset GPG key permissions -mkdir -p /var/lib/zeyple/keys -chmod 700 /var/lib/zeyple/keys -chown -R 600:600 /var/lib/zeyple/keys +sed -i '/User overrides/q' /opt/postfix/conf/main.cf +echo >> /opt/postfix/conf/main.cf +if [ -f /opt/postfix/conf/extra.cf ]; then + cat /opt/postfix/conf/extra.cf >> /opt/postfix/conf/main.cf +fi # Fix Postfix permissions chown -R root:postfix /opt/postfix/conf/sql/ @@ -282,7 +307,7 @@ chgrp -R postdrop /var/spool/postfix/maildrop postfix set-permissions # Check Postfix configuration -postconf -c /opt/postfix/conf +postconf -c /opt/postfix/conf > /dev/null if [[ $? != 0 ]]; then echo "Postfix configuration error, refusing to start." 
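
The `sed '/User overrides/q'` plus `cat extra.cf` sequence above is what keeps user-supplied Postfix settings across container restarts: main.cf is truncated right after the override marker and extra.cf, if present, is appended. A small Python rendering of that behaviour, assuming the same file locations and marker text as the script:

    import os

    MAIN_CF = '/opt/postfix/conf/main.cf'
    EXTRA_CF = '/opt/postfix/conf/extra.cf'

    def apply_extra_cf(main_cf=MAIN_CF, extra_cf=EXTRA_CF):
        # Keep main.cf up to and including the 'User overrides' marker line
        # (sed '/User overrides/q' prints that line and quits), add a blank
        # separator line, then append extra.cf if the user provided one.
        with open(main_cf) as handle:
            lines = handle.readlines()
        kept = []
        for line in lines:
            kept.append(line)
            if 'User overrides' in line:
                break
        kept.append('\n')
        if os.path.isfile(extra_cf):
            with open(extra_cf) as handle:
                kept.append(handle.read())
        with open(main_cf, 'w') as handle:
            handle.writelines(kept)
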
diff --git a/data/Dockerfiles/postfix/supervisord.conf b/data/Dockerfiles/postfix/supervisord.conf index 27494bd6..134a6c6d 100644 --- a/data/Dockerfiles/postfix/supervisord.conf +++ b/data/Dockerfiles/postfix/supervisord.conf @@ -1,4 +1,5 @@ [supervisord] +pidfile=/var/run/supervisord.pid nodaemon=true user=root @@ -12,6 +13,10 @@ autostart=true [program:postfix] command=/opt/postfix.sh +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 autorestart=true [eventlistener:processes] diff --git a/data/Dockerfiles/postfix/syslog-ng.conf b/data/Dockerfiles/postfix/syslog-ng.conf index 5d11a3b7..21044719 100644 --- a/data/Dockerfiles/postfix/syslog-ng.conf +++ b/data/Dockerfiles/postfix/syslog-ng.conf @@ -1,9 +1,10 @@ -@version: 3.13 +@version: 3.19 @include "scl.conf" options { chain_hostnames(off); flush_lines(0); use_dns(no); + dns_cache(no); use_fqdn(no); owner("root"); group("adm"); perm(0640); stats_freq(0); diff --git a/data/Dockerfiles/postfix/zeyple.conf b/data/Dockerfiles/postfix/zeyple.conf deleted file mode 100644 index cc176a0e..00000000 --- a/data/Dockerfiles/postfix/zeyple.conf +++ /dev/null @@ -1,9 +0,0 @@ -[zeyple] -log_file = /dev/null - -[gpg] -home = /var/lib/zeyple/keys - -[relay] -host = localhost -port = 10026 diff --git a/data/Dockerfiles/postfix/zeyple.py b/data/Dockerfiles/postfix/zeyple.py deleted file mode 100755 index bb218831..00000000 --- a/data/Dockerfiles/postfix/zeyple.py +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -import sys -import os -import logging -import email -import email.mime.multipart -import email.mime.application -import email.encoders -import smtplib -import copy -from io import BytesIO - -try: - from configparser import SafeConfigParser # Python 3 -except ImportError: - from ConfigParser import SafeConfigParser # Python 2 - -import gpgme - -# Boiler plate to avoid dependency on six -# BBB: Python 2.7 support -PY3K = sys.version_info > (3, 0) - - -def message_from_binary(message): - if PY3K: - return email.message_from_bytes(message) - else: - return email.message_from_string(message) - - -def as_binary_string(email): - if PY3K: - return email.as_bytes() - else: - return email.as_string() - - -def encode_string(string): - if isinstance(string, bytes): - return string - else: - return string.encode('utf-8') - - -__title__ = 'Zeyple' -__version__ = '1.2.0' -__author__ = 'Cédric Félizard' -__license__ = 'AGPLv3+' -__copyright__ = 'Copyright 2012-2016 Cédric Félizard' - - -class Zeyple: - """Zeyple Encrypts Your Precious Log Emails""" - - def __init__(self, config_fname='zeyple.conf'): - self.config = self.load_configuration(config_fname) - - log_file = self.config.get('zeyple', 'log_file') - logging.basicConfig( - filename=log_file, level=logging.DEBUG, - format='%(asctime)s %(process)s %(levelname)s %(message)s' - ) - logging.info("Zeyple ready to encrypt outgoing emails") - - def load_configuration(self, filename): - """Reads and parses the config file""" - - config = SafeConfigParser() - config.read([ - os.path.join('/etc/', filename), - filename, - ]) - if not config.sections(): - raise IOError('Cannot open config file.') - return config - - @property - def gpg(self): - protocol = gpgme.PROTOCOL_OpenPGP - - if self.config.has_option('gpg', 'executable'): - executable = self.config.get('gpg', 'executable') - else: - executable = None # Default value - - home_dir = self.config.get('gpg', 'home') - - ctx = gpgme.Context() - ctx.set_engine_info(protocol, 
executable, home_dir) - ctx.armor = True - - return ctx - - def process_message(self, message_data, recipients): - """Encrypts the message with recipient keys""" - message_data = encode_string(message_data) - - in_message = message_from_binary(message_data) - logging.info( - "Processing outgoing message %s", in_message['Message-id']) - - if not recipients: - logging.warn("Cannot find any recipients, ignoring") - - sent_messages = [] - for recipient in recipients: - logging.info("Recipient: %s", recipient) - - key_id = self._user_key(recipient) - logging.info("Key ID: %s", key_id) - - if key_id: - out_message = self._encrypt_message(in_message, key_id) - - # Delete Content-Transfer-Encoding if present to default to - # "7bit" otherwise Thunderbird seems to hang in some cases. - del out_message["Content-Transfer-Encoding"] - else: - logging.warn("No keys found, message will be sent unencrypted") - out_message = copy.copy(in_message) - - self._add_zeyple_header(out_message) - self._send_message(out_message, recipient) - sent_messages.append(out_message) - - return sent_messages - - def _get_version_part(self): - ret = email.mime.application.MIMEApplication( - 'Version: 1\n', - 'pgp-encrypted', - email.encoders.encode_noop, - ) - ret.add_header( - 'Content-Description', - "PGP/MIME version identification", - ) - return ret - - def _get_encrypted_part(self, payload): - ret = email.mime.application.MIMEApplication( - payload, - 'octet-stream', - email.encoders.encode_noop, - name="encrypted.asc", - ) - ret.add_header('Content-Description', "OpenPGP encrypted message") - ret.add_header( - 'Content-Disposition', - 'inline', - filename='encrypted.asc', - ) - return ret - - def _encrypt_message(self, in_message, key_id): - if in_message.is_multipart(): - # get the body (after the first \n\n) - payload = in_message.as_string().split("\n\n", 1)[1].strip() - - # prepend the Content-Type including the boundary - content_type = "Content-Type: " + in_message["Content-Type"] - payload = content_type + "\n\n" + payload - - message = email.message.Message() - message.set_payload(payload) - - payload = message.get_payload() - - else: - payload = in_message.get_payload() - payload = encode_string(payload) - - quoted_printable = email.charset.Charset('ascii') - quoted_printable.body_encoding = email.charset.QP - - message = email.mime.nonmultipart.MIMENonMultipart( - 'text', 'plain', charset='utf-8' - ) - message.set_payload(payload, charset=quoted_printable) - - mixed = email.mime.multipart.MIMEMultipart( - 'mixed', - None, - [message], - ) - - # remove superfluous header - del mixed['MIME-Version'] - - payload = as_binary_string(mixed) - - encrypted_payload = self._encrypt_payload(payload, [key_id]) - - version = self._get_version_part() - encrypted = self._get_encrypted_part(encrypted_payload) - - out_message = copy.copy(in_message) - out_message.preamble = "This is an OpenPGP/MIME encrypted " \ - "message (RFC 4880 and 3156)" - - if 'Content-Type' not in out_message: - out_message['Content-Type'] = 'multipart/encrypted' - else: - out_message.replace_header( - 'Content-Type', - 'multipart/encrypted', - ) - - out_message.set_param('protocol', 'application/pgp-encrypted') - out_message.set_payload([version, encrypted]) - - return out_message - - def _encrypt_payload(self, payload, key_ids): - """Encrypts the payload with the given keys""" - payload = encode_string(payload) - - plaintext = BytesIO(payload) - ciphertext = BytesIO() - - self.gpg.armor = True - - recipient = [self.gpg.get_key(key_id) for key_id in 
key_ids] - - self.gpg.encrypt(recipient, gpgme.ENCRYPT_ALWAYS_TRUST, - plaintext, ciphertext) - - return ciphertext.getvalue() - - def _user_key(self, email): - """Returns the GPG key for the given email address""" - logging.info("Trying to encrypt for %s", email) - keys = [key for key in self.gpg.keylist(email)] - - if keys: - key = keys.pop() # NOTE: looks like keys[0] is the master key - key_id = key.subkeys[0].keyid - return key_id - - return None - - def _add_zeyple_header(self, message): - if self.config.has_option('zeyple', 'add_header') and \ - self.config.getboolean('zeyple', 'add_header'): - message.add_header( - 'X-Zeyple', - "processed by {0} v{1}".format(__title__, __version__) - ) - - def _send_message(self, message, recipient): - """Sends the given message through the SMTP relay""" - logging.info("Sending message %s", message['Message-id']) - - smtp = smtplib.SMTP(self.config.get('relay', 'host'), - self.config.get('relay', 'port')) - - smtp.sendmail(message['From'], recipient, message.as_string()) - smtp.quit() - - logging.info("Message %s sent", message['Message-id']) - - -if __name__ == '__main__': - recipients = sys.argv[1:] - - # BBB: Python 2.7 support - binary_stdin = sys.stdin.buffer if PY3K else sys.stdin - message = binary_stdin.read() - - zeyple = Zeyple() - zeyple.process_message(message, recipients) diff --git a/data/Dockerfiles/rspamd/Dockerfile b/data/Dockerfiles/rspamd/Dockerfile index a2b5f7f8..58cd1ac5 100644 --- a/data/Dockerfiles/rspamd/Dockerfile +++ b/data/Dockerfiles/rspamd/Dockerfile @@ -1,27 +1,29 @@ -FROM ubuntu:bionic +FROM debian:buster-slim LABEL maintainer "Andre Peters " ARG DEBIAN_FRONTEND=noninteractive +ARG CODENAME=buster ENV LC_ALL C RUN apt-get update && apt-get install -y \ tzdata \ - ca-certificates \ - gnupg2 \ - apt-transport-https \ - && apt-key adv --fetch-keys https://rspamd.com/apt/gpg.key \ - && echo "deb https://rspamd.com/apt-stable/ bionic main" > /etc/apt/sources.list.d/rspamd.list \ - && apt-get update && apt-get install -y rspamd \ - && rm -rf /var/lib/apt/lists/* \ - && echo '.include $LOCAL_CONFDIR/local.d/rspamd.conf.local' > /etc/rspamd/rspamd.conf.local \ - && apt-get autoremove --purge \ - && apt-get clean \ - && mkdir -p /run/rspamd \ - && chown _rspamd:_rspamd /run/rspamd + ca-certificates \ + gnupg2 \ + apt-transport-https \ + dnsutils \ + && apt-key adv --fetch-keys https://rspamd.com/apt-stable/gpg.key \ + && echo "deb [arch=amd64] https://rspamd.com/apt-stable/ $CODENAME main" > /etc/apt/sources.list.d/rspamd.list \ + && echo "deb-src [arch=amd64] https://rspamd.com/apt-stable/ $CODENAME main" >> /etc/apt/sources.list.d/rspamd.list \ + && apt-get update \ + && apt-get --no-install-recommends -y install rspamd \ + && rm -rf /var/lib/apt/lists/* \ + && apt-get autoremove --purge \ + && apt-get clean \ + && mkdir -p /run/rspamd \ + && chown _rspamd:_rspamd /run/rspamd COPY settings.conf /etc/rspamd/settings.conf COPY docker-entrypoint.sh /docker-entrypoint.sh -COPY metadata_exporter.lua /usr/share/rspamd/lua/metadata_exporter.lua ENTRYPOINT ["/docker-entrypoint.sh"] diff --git a/data/Dockerfiles/rspamd/docker-entrypoint.sh b/data/Dockerfiles/rspamd/docker-entrypoint.sh index 6288550d..cb7d0563 100755 --- a/data/Dockerfiles/rspamd/docker-entrypoint.sh +++ b/data/Dockerfiles/rspamd/docker-entrypoint.sh @@ -1,9 +1,37 @@ #!/bin/bash -chown -R _rspamd:_rspamd /var/lib/rspamd /etc/rspamd/local.d /etc/rspamd/override.d /etc/rspamd/custom +mkdir -p /etc/rspamd/plugins.d \ + /etc/rspamd/custom + +touch 
/etc/rspamd/rspamd.conf.local \ + /etc/rspamd/rspamd.conf.override + chmod 755 /var/lib/rspamd -[[ ! -f /etc/rspamd/override.d/worker-controller-password.inc ]] && echo '# Placeholder' > /etc/rspamd/override.d/worker-controller-password.inc -chown _rspamd:_rspamd /etc/rspamd/override.d/worker-controller-password.inc -[[ ! -f /etc/rspamd/custom/sa-rules-heinlein ]] && echo '# to be auto-filled by dovecot-mailcow' > /etc/rspamd/custom/sa-rules-heinlein + +[[ ! -f /etc/rspamd/override.d/worker-controller-password.inc ]] && echo '# Autogenerated by mailcow' > /etc/rspamd/override.d/worker-controller-password.inc +[[ ! -f /etc/rspamd/custom/sa-rules-heinlein ]] && echo '# Autogenerated by mailcow' > /etc/rspamd/custom/sa-rules-heinlein +[[ ! -f /etc/rspamd/custom/dovecot_trusted.map ]] && echo '# Autogenerated by mailcow' > /etc/rspamd/custom/dovecot_trusted.map + +DOVECOT_V4= +DOVECOT_V6= +until [[ ! -z ${DOVECOT_V4} ]]; do + DOVECOT_V4=$(dig a dovecot +short) + DOVECOT_V6=$(dig aaaa dovecot +short) + [[ ! -z ${DOVECOT_V4} ]] && break; + echo "Waiting for Dovecot" + sleep 3 +done +echo ${DOVECOT_V4}/32 > /etc/rspamd/custom/dovecot_trusted.map +if [[ ! -z ${DOVECOT_V6} ]]; then + echo ${DOVECOT_V6}/128 >> /etc/rspamd/custom/dovecot_trusted.map +fi + +chown -R _rspamd:_rspamd /var/lib/rspamd \ + /etc/rspamd/local.d \ + /etc/rspamd/override.d \ + /etc/rspamd/custom \ + /etc/rspamd/rspamd.conf.local \ + /etc/rspamd/rspamd.conf.override \ + /etc/rspamd/plugins.d exec "$@" diff --git a/data/Dockerfiles/sogo/Dockerfile b/data/Dockerfiles/sogo/Dockerfile index 085f1bc2..6d57aa4b 100644 --- a/data/Dockerfiles/sogo/Dockerfile +++ b/data/Dockerfiles/sogo/Dockerfile @@ -3,7 +3,7 @@ LABEL maintainer "Andre Peters " ARG DEBIAN_FRONTEND=noninteractive ENV LC_ALL C -ENV GOSU_VERSION 1.9 +ENV GOSU_VERSION 1.11 # Prerequisites RUN apt-get update && apt-get install -y --no-install-recommends \ @@ -13,6 +13,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gettext \ gnupg \ mysql-client \ + rsync \ supervisor \ syslog-ng \ syslog-ng-core \ @@ -22,23 +23,19 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ psmisc \ wget \ patch \ - && rm -rf /var/lib/apt/lists/* \ && dpkgArch="$(dpkg --print-architecture | awk -F- '{ print $NF }')" \ && wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch" \ && chmod +x /usr/local/bin/gosu \ - && gosu nobody true - -RUN mkdir /usr/share/doc/sogo \ + && gosu nobody true \ + && mkdir /usr/share/doc/sogo \ && touch /usr/share/doc/sogo/empty.sh \ && apt-key adv --keyserver keyserver.ubuntu.com --recv-key 0x810273C4 \ && echo "deb http://packages.inverse.ca/SOGo/nightly/4/debian/ stretch stretch" > /etc/apt/sources.list.d/sogo.list \ - && apt-get update && apt-get install -y --force-yes \ + && apt-get update && apt-get install -y --no-install-recommends \ sogo \ sogo-activesync \ + && apt-get autoclean \ && rm -rf /var/lib/apt/lists/* \ - && echo '* * * * * sogo /usr/sbin/sogo-ealarms-notify -p /etc/sogo/sieve.creds 2>/dev/null' > /etc/cron.d/sogo \ - && echo '* * * * * sogo /usr/sbin/sogo-tool expire-sessions 60' >> /etc/cron.d/sogo \ - && echo '0 0 * * * sogo /usr/sbin/sogo-tool update-autoreply -p /etc/sogo/sieve.creds' >> /etc/cron.d/sogo \ && touch /etc/default/locale COPY ./bootstrap-sogo.sh /bootstrap-sogo.sh @@ -51,7 +48,3 @@ RUN chmod +x /bootstrap-sogo.sh \ /usr/local/sbin/stop-supervisor.sh CMD exec /usr/bin/supervisord -c /etc/supervisor/supervisord.conf - -VOLUME 
/usr/lib/GNUstep/SOGo/ - -RUN rm -rf /tmp/* /var/tmp/* diff --git a/data/Dockerfiles/sogo/bootstrap-sogo.sh b/data/Dockerfiles/sogo/bootstrap-sogo.sh index 5072a306..b85c9de4 100755 --- a/data/Dockerfiles/sogo/bootstrap-sogo.sh +++ b/data/Dockerfiles/sogo/bootstrap-sogo.sh @@ -14,11 +14,11 @@ do done # Wait for updated schema -DBV_NOW=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT version FROM versions;" -BN) +DBV_NOW=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT version FROM versions WHERE application = 'db_schema';" -BN) DBV_NEW=$(grep -oE '\$db_version = .*;' init_db.inc.php | sed 's/$db_version = //g;s/;//g' | cut -d \" -f2) while [[ ${DBV_NOW} != ${DBV_NEW} ]]; do echo "Waiting for schema update..." - DBV_NOW=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT version FROM versions;" -BN) + DBV_NOW=$(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT version FROM versions WHERE application = 'db_schema';" -BN) DBV_NEW=$(grep -oE '\$db_version = .*;' init_db.inc.php | sed 's/$db_version = //g;s/;//g' | cut -d \" -f2) sleep 5 done @@ -30,10 +30,11 @@ mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e while [[ ${VIEW_OK} != 'OK' ]]; do mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} << EOF -CREATE VIEW sogo_view (c_uid, domain, c_name, c_password, c_cn, mail, aliases, ad_aliases, kind, multiple_bookings) AS -SELECT mailbox.username, mailbox.domain, mailbox.username, if(json_extract(attributes, '$.force_pw_update') LIKE '%0%', if(json_extract(attributes, '$.sogo_access') LIKE '%1%', password, 'invalid'), 'invalid'), mailbox.name, mailbox.username, IFNULL(GROUP_CONCAT(ga.aliases SEPARATOR ' '), ''), IFNULL(gda.ad_alias, ''), mailbox.kind, mailbox.multiple_bookings FROM mailbox +CREATE VIEW sogo_view (c_uid, domain, c_name, c_password, c_cn, mail, aliases, ad_aliases, ext_acl, kind, multiple_bookings) AS +SELECT mailbox.username, mailbox.domain, mailbox.username, if(json_extract(attributes, '$.force_pw_update') LIKE '%0%', if(json_extract(attributes, '$.sogo_access') LIKE '%1%', password, '{SSHA256}A123A123A321A321A321B321B321B123B123B321B432F123E321123123321321'), '{SSHA256}A123A123A321A321A321B321B321B123B123B321B432F123E321123123321321'), mailbox.name, mailbox.username, IFNULL(GROUP_CONCAT(ga.aliases SEPARATOR ' '), ''), IFNULL(gda.ad_alias, ''), IFNULL(external_acl.send_as_acl, ''), mailbox.kind, mailbox.multiple_bookings FROM mailbox LEFT OUTER JOIN grouped_mail_aliases ga ON ga.username REGEXP CONCAT('(^|,)', mailbox.username, '($|,)') LEFT OUTER JOIN grouped_domain_alias_address gda ON gda.username = mailbox.username +LEFT OUTER JOIN grouped_sender_acl_external external_acl ON external_acl.username = mailbox.username WHERE mailbox.active = '1' GROUP BY mailbox.username; EOF @@ -51,7 +52,7 @@ while [[ ${STATIC_VIEW_OK} != 'OK' ]]; do if [[ ! -z $(mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -B -e "SELECT 'OK' FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = '_sogo_static_view'") ]]; then STATIC_VIEW_OK=OK echo "Updating _sogo_static_view content..." 
- mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -B -e "REPLACE INTO _sogo_static_view (c_uid, domain, c_name, c_password, c_cn, mail, aliases, ad_aliases, kind, multiple_bookings) SELECT c_uid, domain, c_name, c_password, c_cn, mail, aliases, ad_aliases, kind, multiple_bookings from sogo_view;" + mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -B -e "REPLACE INTO _sogo_static_view (c_uid, domain, c_name, c_password, c_cn, mail, aliases, ad_aliases, ext_acl, kind, multiple_bookings) SELECT c_uid, domain, c_name, c_password, c_cn, mail, aliases, ad_aliases, ext_acl, kind, multiple_bookings from sogo_view;" mysql --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -B -e "DELETE FROM _sogo_static_view WHERE c_uid NOT IN (SELECT username FROM mailbox WHERE active = '1')" else echo "Waiting for database initialization..." @@ -83,9 +84,16 @@ EOF done -mkdir -p /var/lib/sogo/GNUstep/Defaults/ +if [[ "${ALLOW_ADMIN_EMAIL_LOGIN}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then + TRUST_PROXY="YES" +else + TRUST_PROXY="NO" +fi +# cat /dev/urandom seems to hang here occasionally and is not recommended anyway, better use openssl +RAND_PASS=$(openssl rand -base64 16 | tr -dc _A-Z-a-z-0-9) # Generate plist header with timezone data +mkdir -p /var/lib/sogo/GNUstep/Defaults/ cat <<EOF > /var/lib/sogo/GNUstep/Defaults/sogod.plist @@ -93,6 +101,12 @@ cat <<EOF > /var/lib/sogo/GNUstep/Defaults/sogod.plist OCSAclURL mysql://${DBUSER}:${DBPASS}@%2Fvar%2Frun%2Fmysqld%2Fmysqld.sock/${DBNAME}/sogo_acl + SOGoIMAPServer + imaps://${IPV4_NETWORK}.250:993 + SOGoTrustProxyAuthentication + ${TRUST_PROXY} + SOGoEncryptionKey + ${RAND_PASS} OCSCacheFolderURL mysql://${DBUSER}:${DBPASS}@%2Fvar%2Frun%2Fmysqld%2Fmysqld.sock/${DBNAME}/sogo_cache_folder OCSEMailAlarmsFolderURL @@ -125,6 +139,7 @@ while read -r line gal aliases ad_aliases + ext_acl KindFieldName kind @@ -168,19 +183,29 @@ chown sogo:sogo -R /var/lib/sogo/ chmod 600 /var/lib/sogo/GNUstep/Defaults/sogod.plist # Patch ACLs -if [[ ${ACL_ANYONE} == 'allow' ]]; then - #enable any or authenticated targets for ACL - if patch -R -sfN --dry-run /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff > /dev/null; then - patch -R /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff; - fi -else - #disable any or authenticated targets for ACL - if patch -sfN --dry-run /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff > /dev/null; then - patch /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff; - fi -fi +#if [[ ${ACL_ANYONE} == 'allow' ]]; then +# #enable any or authenticated targets for ACL +# if patch -R -sfN --dry-run /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff > /dev/null; then +# patch -R /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff; +# fi +#else +# #disable any or authenticated targets for ACL +# if patch -sfN --dry-run /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff > /dev/null; then +# patch /usr/lib/GNUstep/SOGo/Templates/UIxAclEditor.wox < /acl.diff; +# fi +#fi # Copy logo, if any [[ -f /etc/sogo/sogo-full.svg ]] && cp /etc/sogo/sogo-full.svg /usr/lib/GNUstep/SOGo/WebServerResources/img/sogo-full.svg +# Rsync web content +echo "Syncing web content with named volume" +rsync -a /usr/lib/GNUstep/SOGo/.
/sogo_web/ + +# Creating cronjobs +echo "* * * * * sogo /usr/sbin/sogo-ealarms-notify -p /etc/sogo/sieve.creds 2>/dev/null" > /etc/cron.d/sogo +echo "* * * * * sogo /usr/sbin/sogo-tool expire-sessions ${SOGO_EXPIRE_SESSION}" >> /etc/cron.d/sogo +echo "0 0 * * * sogo /usr/sbin/sogo-tool update-autoreply -p /etc/sogo/sieve.creds" >> /etc/cron.d/sogo + + exec gosu sogo /usr/sbin/sogod diff --git a/data/Dockerfiles/solr/Dockerfile b/data/Dockerfiles/solr/Dockerfile index 67cd3384..d3ae98a8 100644 --- a/data/Dockerfiles/solr/Dockerfile +++ b/data/Dockerfiles/solr/Dockerfile @@ -1,9 +1,25 @@ -FROM solr:7-alpine -USER root -COPY docker-entrypoint.sh / +FROM solr:7.7-slim -RUN apk --no-cache add su-exec curl tzdata \ +USER root + +ENV GOSU_VERSION 1.11 + +COPY docker-entrypoint.sh / +COPY solr-config-7.7.0.xml / +COPY solr-schema-7.7.0.xml / + +RUN dpkgArch="$(dpkg --print-architecture | awk -F- '{ print $NF }')" \ + && wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch" \ + && chmod +x /usr/local/bin/gosu \ + && gosu nobody true \ + && apt-get update && apt-get install -y --no-install-recommends \ + tzdata \ + curl \ + bash \ + && apt-get autoclean \ + && rm -rf /var/lib/apt/lists/* \ && chmod +x /docker-entrypoint.sh \ - && /docker-entrypoint.sh --bootstrap + && sync \ + && bash /docker-entrypoint.sh --bootstrap ENTRYPOINT ["/docker-entrypoint.sh"] diff --git a/data/Dockerfiles/solr/docker-entrypoint.sh b/data/Dockerfiles/solr/docker-entrypoint.sh index 108f8b5a..1c5c6f51 100755 --- a/data/Dockerfiles/solr/docker-entrypoint.sh +++ b/data/Dockerfiles/solr/docker-entrypoint.sh @@ -18,403 +18,44 @@ fi set -e -# allow easier debugging with `docker run -e VERBOSE=yes` -if [[ "$VERBOSE" = "yes" ]]; then - set -x -fi - # run the optional initdb . 
/opt/docker-solr/scripts/run-initdb -function solr_config() { - curl -XPOST http://localhost:8983/solr/dovecot/schema -H 'Content-type:application/json' -d '{ - "add-field-type":{ - "name":"long", - "class":"solr.TrieLongField" - }, - "add-field-type":{ - "name":"dovecot_text", - "class":"solr.TextField", - "autoGeneratePhraseQueries":true, - "positionIncrementGap":100, - "indexAnalyser":{ - "charFilter":{ - "class":"solr.MappingCharFilterFactory", - "mapping":"mapping-FoldToASCII.txt" - }, - "charFilter":{ - "class":"solr.MappingCharFilterFactory", - "mapping":"mapping-ISOLatin1Accent.txt" - }, - "charFilter":{ - "class":"solr.HTMLStripCharFilterFactory" - }, - "tokenizer":{ - "class":"solr.StandardTokenizerFactory" - }, - "filter":{ - "class":"solr.StopFilterFactory", - "words":"stopwords.txt", - "ignoreCase":true - }, - "filter":{ - "class":"solr.WordDelimiterGraphFilterFactory", - "generateWordParts":1, - "generateNumberParts":1, - "splitOnCaseChange":1, - "splitOnNumerics":1, - "catenateWords":1, - "catenateNumbers":1, - "catenateAll":1 - }, - "filter":{ - "class":"solr.FlattenGraphFilterFactory" - }, - "filter":{ - "class":"solr.LowerCaseFilterFactory" - }, - "filter":{ - "class":"solr.KeywordMarkerFilterFactory", - "protected":"protwords.txt" - }, - "filter":{ - "class":"solr.PorterStemFilterFactory" - } - }, - "queryAnalyzer":{ - "tokenizer":{ - "class":"solr.StandardTokenizerFactory" - }, - "filter":{ - "class":"solr.SynonymGraphFilterFactory", - "expand":true, - "ignoreCase":true, - "synonyms":synonyms.txt - }, - "filter":{ - "class":"solr.FlattenGraphFilterFactory" - }, - "filter":{ - "class":"solr.StopFilterFactory", - "words":"stopwords.txt", - "ignoreCase":true - }, - "filter":{ - "class":"solr.WordDelimiterGraphFilterFactory", - "generateWordParts":1, - "generateNumberParts":1, - "splitOnCaseChange":1, - "splitOnNumerics":1, - "catenateWords":1, - "catenateNumbers":1, - "catenateAll":1 - }, - "filter":{ - "class":"solr.LowerCaseFilterFactory" - }, - "filter":{ - "class":"solr.KeywordMarkerFilterFactory", - "protected":"protwords.txt" - }, - "filter":{ - "class":"solr.PorterStemFilterFactory" - } - } - }, - "add-field":{ - "name":"uid", - "type":"long", - "indexed":true, - "stored":true, - "required":true - }, - "add-field":{ - "name":"box", - "type":"string", - "indexed":true, - "stored":true, - "required":true - }, - "add-field":{ - "name":"user", - "type":"string", - "indexed":true, - "stored":true, - "required":true - }, - "add-field":{ - "name":"hdr", - "type":"dovecot_text", - "indexed":true, - "stored":false - - }, - "add-field":{ - "name":"body", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "add-field":{ - "name":"from", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "add-field":{ - "name":"to", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "add-field":{ - "name":"cc", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "add-field":{ - "name":"bcc", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "add-field":{ - "name":"subject", - "type":"dovecot_text", - "indexed":true, - "stored":false - } - }' - - curl -XPOST http://localhost:8983/solr/dovecot/schema -H 'Content-type:application/json' -d '{ - "replace-field-type":{ - "name":"long", - "class":"solr.TrieLongField" - }, - "replace-field-type":{ - "name":"dovecot_text", - "class":"solr.TextField", - "autoGeneratePhraseQueries":true, - "positionIncrementGap":100, - "indexAnalyser":{ - "charFilter":{ - 
"class":"solr.MappingCharFilterFactory", - "mapping":"mapping-FoldToASCII.txt" - }, - "charFilter":{ - "class":"solr.MappingCharFilterFactory", - "mapping":"mapping-ISOLatin1Accent.txt" - }, - "charFilter":{ - "class":"solr.HTMLStripCharFilterFactory" - }, - "tokenizer":{ - "class":"solr.StandardTokenizerFactory" - }, - "filter":{ - "class":"solr.StopFilterFactory", - "words":"stopwords.txt", - "ignoreCase":true - }, - "filter":{ - "class":"solr.WordDelimiterGraphFilterFactory", - "generateWordParts":1, - "generateNumberParts":1, - "splitOnCaseChange":1, - "splitOnNumerics":1, - "catenateWords":1, - "catenateNumbers":1, - "catenateAll":1 - }, - "filter":{ - "class":"solr.FlattenGraphFilterFactory" - }, - "filter":{ - "class":"solr.LowerCaseFilterFactory" - }, - "filter":{ - "class":"solr.KeywordMarkerFilterFactory", - "protected":"protwords.txt" - }, - "filter":{ - "class":"solr.PorterStemFilterFactory" - } - }, - "queryAnalyzer":{ - "tokenizer":{ - "class":"solr.StandardTokenizerFactory" - }, - "filter":{ - "class":"solr.SynonymGraphFilterFactory", - "expand":true, - "ignoreCase":true, - "synonyms":synonyms.txt - }, - "filter":{ - "class":"solr.FlattenGraphFilterFactory" - }, - "filter":{ - "class":"solr.StopFilterFactory", - "words":"stopwords.txt", - "ignoreCase":true - }, - "filter":{ - "class":"solr.WordDelimiterGraphFilterFactory", - "generateWordParts":1, - "generateNumberParts":1, - "splitOnCaseChange":1, - "splitOnNumerics":1, - "catenateWords":1, - "catenateNumbers":1, - "catenateAll":1 - }, - "filter":{ - "class":"solr.LowerCaseFilterFactory" - }, - "filter":{ - "class":"solr.KeywordMarkerFilterFactory", - "protected":"protwords.txt" - }, - "filter":{ - "class":"solr.PorterStemFilterFactory" - } - } - }, - "replace-field":{ - "name":"uid", - "type":"long", - "indexed":true, - "stored":true, - "required":true - }, - "replace-field":{ - "name":"box", - "type":"string", - "indexed":true, - "stored":true, - "required":true - }, - "replace-field":{ - "name":"user", - "type":"string", - "indexed":true, - "stored":true, - "required":true - }, - "replace-field":{ - "name":"hdr", - "type":"dovecot_text", - "indexed":true, - "stored":false - - }, - "replace-field":{ - "name":"body", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "replace-field":{ - "name":"from", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "replace-field":{ - "name":"to", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "replace-field":{ - "name":"cc", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "replace-field":{ - "name":"bcc", - "type":"dovecot_text", - "indexed":true, - "stored":false - }, - "replace-field":{ - "name":"subject", - "type":"dovecot_text", - "indexed":true, - "stored":false - } - }' - - curl -XPOST http://localhost:8983/solr/dovecot/config -H 'Content-type:application/json' -d '{ - "update-requesthandler":{ - "name":"/select", - "class":"solr.SearchHandler", - "defaults":{ - "wt":"xml" - } - } - }' - - curl -XPOST http://localhost:8983/solr/dovecot/config/updateHandler -d '{ - "set-property": { - "updateHandler.autoSoftCommit.maxDocs":500, - "updateHandler.autoSoftCommit.maxTime":120000, - "updateHandler.autoCommit.maxDocs":200, - "updateHandler.autoCommit.maxTime":1800000, - "updateHandler.autoCommit.openSearcher":false - } - }' -} - # fixing volume permission - -[[ -d /opt/solr/server/solr/dovecot/data ]] && chown -R solr:solr /opt/solr/server/solr/dovecot/data +[[ -d /opt/solr/server/solr/dovecot-fts/data ]] && chown -R 
solr:solr /opt/solr/server/solr/dovecot-fts/data if [[ "${1}" != "--bootstrap" ]]; then sed -i '/SOLR_HEAP=/c\SOLR_HEAP="'${SOLR_HEAP:-1024}'m"' /opt/solr/bin/solr.in.sh else sed -i '/SOLR_HEAP=/c\SOLR_HEAP="256m"' /opt/solr/bin/solr.in.sh fi -# start a Solr so we can use the Schema API, but only on localhost, -# so that clients don't see Solr until we have configured it. +if [[ "${1}" == "--bootstrap" ]]; then + echo "Creating initial configuration" + echo "Modifying default config set" + cp /solr-config-7.7.0.xml /opt/solr/server/solr/configsets/_default/conf/solrconfig.xml + cp /solr-schema-7.7.0.xml /opt/solr/server/solr/configsets/_default/conf/schema.xml + rm /opt/solr/server/solr/configsets/_default/conf/managed-schema -echo "Starting local Solr instance to setup configuration" -su-exec solr start-local-solr + echo "Starting local Solr instance to setup configuration" + gosu solr start-local-solr -# keep a sentinel file so we don't try to create the core a second time -# for example when we restart a container. - -SENTINEL=/opt/docker-solr/core_created - -if [[ -f ${SENTINEL} ]]; then - echo "skipping core creation" -else - echo "Creating core \"dovecot\"" - su-exec solr /opt/solr/bin/solr create -c "dovecot" + echo "Creating core \"dovecot-fts\"" + gosu solr /opt/solr/bin/solr create -c "dovecot-fts" # See https://github.com/docker-solr/docker-solr/issues/27 echo "Checking core" while ! wget -O - 'http://localhost:8983/solr/admin/cores?action=STATUS' | grep -q instanceDir; do echo "Could not find any cores, waiting..." - sleep 5 + sleep 3 done - echo "Created core \"dovecot\"" - touch ${SENTINEL} -fi -echo "Starting configuration" -while ! wget -O - 'http://localhost:8983/solr/admin/cores?action=STATUS' | grep -q instanceDir; do - echo "Waiting for Solr..." 
- sleep 5 -done -solr_config -echo "Stopping local Solr" -su-exec solr stop-local-solr + echo "Created core \"dovecot-fts\"" + + echo "Stopping local Solr" + gosu solr stop-local-solr -if [[ "${1}" == "--bootstrap" ]]; then exit 0 -else - exec su-exec solr solr-foreground fi + +exec gosu solr solr-foreground + diff --git a/data/Dockerfiles/solr/solr-config-7.7.0.xml b/data/Dockerfiles/solr/solr-config-7.7.0.xml new file mode 100644 index 00000000..3661874d --- /dev/null +++ b/data/Dockerfiles/solr/solr-config-7.7.0.xml @@ -0,0 +1,289 @@ + + + + + + + 7.7.0 + + + + + + + + + + + + + + + + ${solr.data.dir:} + + + + + + + ${solr.ulog.dir:} + ${solr.ulog.numVersionBuckets:65536} + + + + + ${solr.autoCommit.maxTime:15000} + false + + + + + ${solr.autoSoftCommit.maxTime:-1} + + + + + + + + + + + + + + + + + + + + + + + + true + + + 20 + + + 200 + + + false + + + + + + + + + + + + + + + explicit + 10 + + + + + + _text_ + + + + + + diff --git a/data/Dockerfiles/solr/solr-schema-7.7.0.xml b/data/Dockerfiles/solr/solr-schema-7.7.0.xml new file mode 100644 index 00000000..2c2e6343 --- /dev/null +++ b/data/Dockerfiles/solr/solr-schema-7.7.0.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + diff --git a/data/Dockerfiles/unbound/Dockerfile b/data/Dockerfiles/unbound/Dockerfile index 7658a8f8..35c6fc0d 100644 --- a/data/Dockerfiles/unbound/Dockerfile +++ b/data/Dockerfiles/unbound/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.9 +FROM alpine:3.10 LABEL maintainer "Andre Peters " diff --git a/data/Dockerfiles/watchdog/Dockerfile b/data/Dockerfiles/watchdog/Dockerfile index 7ab29b68..a884944b 100644 --- a/data/Dockerfiles/watchdog/Dockerfile +++ b/data/Dockerfiles/watchdog/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.9 +FROM alpine:3.10 LABEL maintainer "André Peters " # Installation @@ -7,11 +7,13 @@ RUN apk add --update \ nagios-plugins-tcp \ nagios-plugins-http \ nagios-plugins-ping \ + mariadb-client \ curl \ bash \ coreutils \ jq \ fcgi \ + openssl \ nagios-plugins-mysql \ nagios-plugins-dns \ nagios-plugins-disk \ @@ -26,11 +28,13 @@ RUN apk add --update \ perl-term-readkey \ tini \ tzdata \ + whois \ && curl https://raw.githubusercontent.com/mludvig/smtp-cli/v3.9/smtp-cli -o /smtp-cli \ && chmod +x smtp-cli COPY watchdog.sh /watchdog.sh -ENTRYPOINT ["/sbin/tini", "-g", "--"] +#ENTRYPOINT ["/sbin/tini", "-g", "--"] # Less verbose + CMD /watchdog.sh 2> /dev/null diff --git a/data/Dockerfiles/watchdog/watchdog.sh b/data/Dockerfiles/watchdog/watchdog.sh index ed9b568d..932f8ef5 100755 --- a/data/Dockerfiles/watchdog/watchdog.sh +++ b/data/Dockerfiles/watchdog/watchdog.sh @@ -5,6 +5,8 @@ trap "kill 0" EXIT # Prepare BACKGROUND_TASKS=() +echo "Waiting for containers to settle..." +sleep 10 if [[ "${USE_WATCHDOG}" =~ ^([nN][oO]|[nN])+$ ]]; then echo -e "$(date) - USE_WATCHDOG=n, skipping watchdog..." @@ -17,7 +19,28 @@ if [[ ! -p /tmp/com_pipe ]]; then mkfifo /tmp/com_pipe fi +# Wait for containers +while ! mysqladmin status --socket=/var/run/mysqld/mysqld.sock -u${DBUSER} -p${DBPASS} --silent; do + echo "Waiting for SQL..." + sleep 2 +done + +until [[ $(redis-cli -h redis-mailcow PING) == "PONG" ]]; do + echo "Waiting for Redis..." 
+ sleep 2 +done + +redis-cli -h redis-mailcow DEL F2B_RES > /dev/null + # Common functions +array_diff() { + # https://stackoverflow.com/questions/2312762, Alex Offshore + eval local ARR1=\(\"\${$2[@]}\"\) + eval local ARR2=\(\"\${$3[@]}\"\) + local IFS=$'\n' + mapfile -t $1 < <(comm -23 <(echo "${ARR1[*]}" | sort) <(echo "${ARR2[*]}" | sort)) +} + progress() { SERVICE=${1} TOTAL=${2} @@ -37,7 +60,7 @@ progress() { log_msg() { if [[ ${2} != "no_redis" ]]; then redis-cli -h redis LPUSH WATCHDOG_LOG "{\"time\":\"$(date +%s)\",\"message\":\"$(printf '%s' "${1}" | \ - tr '%&;$"_[]{}-\r\n' ' ')\"}" > /dev/null + tr '\r\n%&;$"_[]{}-' ' ')\"}" > /dev/null fi echo $(date) $(printf '%s\n' "${1}") } @@ -46,6 +69,13 @@ function mail_error() { [[ -z ${1} ]] && return 1 [[ -z ${2} ]] && BODY="Service was restarted on $(date), please check your mailcow installation." || BODY="$(date) - ${2}" WATCHDOG_NOTIFY_EMAIL=$(echo "${WATCHDOG_NOTIFY_EMAIL}" | sed 's/"//;s|"$||') + # Some exceptions for subject and body formats + if [[ ${1} == "fail2ban" ]]; then + SUBJECT="${BODY}" + BODY="Please see netfilter-mailcow for more details and triggered rules." + else + SUBJECT="Watchdog ALERT: ${1}" + fi IFS=',' read -r -a MAIL_RCPTS <<< "${WATCHDOG_NOTIFY_EMAIL}" for rcpt in "${MAIL_RCPTS[@]}"; do RCPT_DOMAIN= @@ -56,15 +86,15 @@ function mail_error() { log_msg "Cannot determine MX for ${rcpt}, skipping email notification..." return 1 fi - [ -f "/tmp/${1}" ] && ATTACH="--attach /tmp/${1}@text/plain" || ATTACH= - ./smtp-cli --missing-modules-ok \ - --subject="Watchdog: ${1} hit the error rate limit" \ + [ -f "/tmp/${1}" ] && BODY="/tmp/${1}" + timeout 10s ./smtp-cli --missing-modules-ok \ + --charset=UTF-8 \ + --subject="${SUBJECT}" \ --body-plain="${BODY}" \ --to=${rcpt} \ --from="watchdog@${MAILCOW_HOSTNAME}" \ --server="${RCPT_MX}" \ - --hello-host=${MAILCOW_HOSTNAME} \ - ${ATTACH} + --hello-host=${MAILCOW_HOSTNAME} log_msg "Sent notification email to ${rcpt}" done } @@ -111,11 +141,11 @@ get_container_ip() { nginx_checks() { err_count=0 diff_c=0 - THRESHOLD=16 + THRESHOLD=5 # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/nginx-mailcow + touch /tmp/nginx-mailcow; echo "$(tail -50 /tmp/nginx-mailcow)" > /tmp/nginx-mailcow host_ip=$(get_container_ip nginx-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_http -4 -H ${host_ip} -u / -p 8081 2>> /tmp/nginx-mailcow 1>&2; err_count=$(( ${err_count} + $? )) @@ -127,7 +157,7 @@ nginx_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -136,11 +166,11 @@ nginx_checks() { unbound_checks() { err_count=0 diff_c=0 - THRESHOLD=8 + THRESHOLD=5 # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/unbound-mailcow + touch /tmp/unbound-mailcow; echo "$(tail -50 /tmp/unbound-mailcow)" > /tmp/unbound-mailcow host_ip=$(get_container_ip unbound-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_dns -s ${host_ip} -H stackoverflow.com 2>> /tmp/unbound-mailcow 1>&2; err_count=$(( ${err_count} + $? 
)) @@ -159,7 +189,32 @@ unbound_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) + fi + done + return 1 +} + +redis_checks() { + err_count=0 + diff_c=0 + THRESHOLD=5 + # Reduce error count by 2 after restarting an unhealthy container + trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 + while [ ${err_count} -lt ${THRESHOLD} ]; do + touch /tmp/redis-mailcow; echo "$(tail -50 /tmp/redis-mailcow)" > /tmp/redis-mailcow + host_ip=$(get_container_ip redis-mailcow) + err_c_cur=${err_count} + /usr/lib/nagios/plugins/check_tcp -4 -H redis-mailcow -p 6379 -E -s "PING\n" -q "QUIT" -e "PONG" 2>> /tmp/redis-mailcow 1>&2; err_count=$(( ${err_count} + $? )) + [ ${err_c_cur} -eq ${err_count} ] && [ ! $((${err_count} - 1)) -lt 0 ] && err_count=$((${err_count} - 1)) diff_c=1 + [ ${err_c_cur} -ne ${err_count} ] && diff_c=$(( ${err_c_cur} - ${err_count} )) + progress "Redis" ${THRESHOLD} $(( ${THRESHOLD} - ${err_count} )) ${diff_c} + if [[ $? == 10 ]]; then + diff_c=0 + sleep 1 + else + diff_c=0 + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -168,11 +223,11 @@ unbound_checks() { mysql_checks() { err_count=0 diff_c=0 - THRESHOLD=12 + THRESHOLD=5 # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/mysql-mailcow + touch /tmp/mysql-mailcow; echo "$(tail -50 /tmp/mysql-mailcow)" > /tmp/mysql-mailcow host_ip=$(get_container_ip mysql-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_mysql -s /var/run/mysqld/mysqld.sock -u ${DBUSER} -p ${DBPASS} -d ${DBNAME} 2>> /tmp/mysql-mailcow 1>&2; err_count=$(( ${err_count} + $? )) @@ -185,7 +240,7 @@ mysql_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -194,11 +249,11 @@ mysql_checks() { sogo_checks() { err_count=0 diff_c=0 - THRESHOLD=10 + THRESHOLD=5 # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/sogo-mailcow + touch /tmp/sogo-mailcow; echo "$(tail -50 /tmp/sogo-mailcow)" > /tmp/sogo-mailcow host_ip=$(get_container_ip sogo-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_http -4 -H ${host_ip} -u /SOGo.index/ -p 20000 -R "SOGo\.MainUI" 2>> /tmp/sogo-mailcow 1>&2; err_count=$(( ${err_count} + $? )) @@ -210,7 +265,7 @@ sogo_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -223,10 +278,10 @@ postfix_checks() { # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/postfix-mailcow + touch /tmp/postfix-mailcow; echo "$(tail -50 /tmp/postfix-mailcow)" > /tmp/postfix-mailcow host_ip=$(get_container_ip postfix-mailcow) err_c_cur=${err_count} - /usr/lib/nagios/plugins/check_smtp -4 -H ${host_ip} -p 589 -f "watchdog@invalid" -C "RCPT TO:null@localhost" -C DATA -C . -R 250 2>> /tmp/postfix-mailcow 1>&2; err_count=$(( ${err_count} + $? )) + /usr/lib/nagios/plugins/check_smtp -4 -H ${host_ip} -p 589 -f "watchdog@invalid" -C "RCPT TO:watchdog@localhost" -C DATA -C . -R 250 2>> /tmp/postfix-mailcow 1>&2; err_count=$(( ${err_count} + $? 
)) /usr/lib/nagios/plugins/check_smtp -4 -H ${host_ip} -p 589 -S 2>> /tmp/postfix-mailcow 1>&2; err_count=$(( ${err_count} + $? )) [ ${err_c_cur} -eq ${err_count} ] && [ ! $((${err_count} - 1)) -lt 0 ] && err_count=$((${err_count} - 1)) diff_c=1 [ ${err_c_cur} -ne ${err_count} ] && diff_c=$(( ${err_c_cur} - ${err_count} )) @@ -236,7 +291,7 @@ postfix_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -245,11 +300,11 @@ postfix_checks() { clamd_checks() { err_count=0 diff_c=0 - THRESHOLD=5 + THRESHOLD=15 # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/clamd-mailcow + touch /tmp/clamd-mailcow; echo "$(tail -50 /tmp/clamd-mailcow)" > /tmp/clamd-mailcow host_ip=$(get_container_ip clamd-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_clamd -4 -H ${host_ip} 2>> /tmp/clamd-mailcow 1>&2; err_count=$(( ${err_count} + $? )) @@ -261,7 +316,7 @@ clamd_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 120 ) + 20 )) fi done return 1 @@ -270,11 +325,11 @@ clamd_checks() { dovecot_checks() { err_count=0 diff_c=0 - THRESHOLD=20 + THRESHOLD=12 # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/dovecot-mailcow + touch /tmp/dovecot-mailcow; echo "$(tail -50 /tmp/dovecot-mailcow)" > /tmp/dovecot-mailcow host_ip=$(get_container_ip dovecot-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_smtp -4 -H ${host_ip} -p 24 -f "watchdog@invalid" -C "RCPT TO:" -L -R "User doesn't exist" 2>> /tmp/dovecot-mailcow 1>&2; err_count=$(( ${err_count} + $? )) @@ -290,7 +345,7 @@ dovecot_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -303,7 +358,7 @@ phpfpm_checks() { # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/php-fpm-mailcow + touch /tmp/php-fpm-mailcow; echo "$(tail -50 /tmp/php-fpm-mailcow)" > /tmp/php-fpm-mailcow host_ip=$(get_container_ip php-fpm-mailcow) err_c_cur=${err_count} /usr/lib/nagios/plugins/check_tcp -H ${host_ip} -p 9001 2>> /tmp/php-fpm-mailcow 1>&2; err_count=$(( ${err_count} + $? )) @@ -316,7 +371,7 @@ phpfpm_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -344,7 +399,73 @@ ratelimit_checks() { sleep 1 else diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + sleep $(( ( RANDOM % 60 ) + 20 )) + fi + done + return 1 +} + +fail2ban_checks() { + err_count=0 + diff_c=0 + THRESHOLD=1 + F2B_LOG_STATUS=($(redis-cli -h redis-mailcow --raw HKEYS F2B_ACTIVE_BANS)) + F2B_RES= + # Reduce error count by 2 after restarting an unhealthy container + trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 + while [ ${err_count} -lt ${THRESHOLD} ]; do + err_c_cur=${err_count} + F2B_LOG_STATUS_PREV=(${F2B_LOG_STATUS[@]}) + F2B_LOG_STATUS=($(redis-cli -h redis-mailcow --raw HKEYS F2B_ACTIVE_BANS)) + array_diff F2B_RES F2B_LOG_STATUS F2B_LOG_STATUS_PREV + if [[ ! 
-z "${F2B_RES}" ]]; then + err_count=$(( ${err_count} + 1 )) + echo -n "${F2B_RES[@]}" | tr -cd "[a-fA-F0-9.:/] " | timeout 3s redis-cli -x -h redis-mailcow SET F2B_RES > /dev/null + if [ $? -ne 0 ]; then + redis-cli -x -h redis-mailcow DEL F2B_RES + fi + fi + [ ${err_c_cur} -eq ${err_count} ] && [ ! $((${err_count} - 1)) -lt 0 ] && err_count=$((${err_count} - 1)) diff_c=1 + [ ${err_c_cur} -ne ${err_count} ] && diff_c=$(( ${err_c_cur} - ${err_count} )) + progress "Fail2ban" ${THRESHOLD} $(( ${THRESHOLD} - ${err_count} )) ${diff_c} + if [[ $? == 10 ]]; then + diff_c=0 + sleep 1 + else + diff_c=0 + sleep $(( ( RANDOM % 60 ) + 20 )) + fi + done + return 1 +} + +acme_checks() { + err_count=0 + diff_c=0 + THRESHOLD=1 + ACME_LOG_STATUS=$(redis-cli -h redis GET ACME_FAIL_TIME) + if [[ -z "${ACME_LOG_STATUS}" ]]; then + redis-cli -h redis SET ACME_FAIL_TIME 0 + ACME_LOG_STATUS=0 + fi + # Reduce error count by 2 after restarting an unhealthy container + trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 + while [ ${err_count} -lt ${THRESHOLD} ]; do + err_c_cur=${err_count} + ACME_LOG_STATUS_PREV=${ACME_LOG_STATUS} + ACME_LOG_STATUS=$(redis-cli -h redis GET ACME_FAIL_TIME) + if [[ ${ACME_LOG_STATUS_PREV} != ${ACME_LOG_STATUS} ]]; then + err_count=$(( ${err_count} + 1 )) + fi + [ ${err_c_cur} -eq ${err_count} ] && [ ! $((${err_count} - 1)) -lt 0 ] && err_count=$((${err_count} - 1)) diff_c=1 + [ ${err_c_cur} -ne ${err_count} ] && diff_c=$(( ${err_c_cur} - ${err_count} )) + progress "ACME" ${THRESHOLD} $(( ${THRESHOLD} - ${err_count} )) ${diff_c} + if [[ $? == 10 ]]; then + diff_c=0 + sleep 1 + else + diff_c=0 + sleep $(( ( RANDOM % 60 ) + 20 )) fi done return 1 @@ -358,10 +479,11 @@ ipv6nat_checks() { trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do err_c_cur=${err_count} - IPV6NAT_CONTAINER_ID=$(curl --silent --insecure https://dockerapi/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"ipv6nat-mailcow\")) | .id") + CONTAINERS=$(curl --silent --insecure https://dockerapi/containers/json) + IPV6NAT_CONTAINER_ID=$(echo ${CONTAINERS} | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"ipv6nat-mailcow\")) | .id") if [[ ! 
-z ${IPV6NAT_CONTAINER_ID} ]]; then - LATEST_STARTED="$(curl --silent --insecure https://dockerapi/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], StartedAt: .State.StartedAt}" | jq -rc "select( .name | tostring | contains(\"ipv6nat-mailcow\") | not)" | jq -rc .StartedAt | xargs -n1 date +%s -d | sort | tail -n1)" - LATEST_IPV6NAT="$(curl --silent --insecure https://dockerapi/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], StartedAt: .State.StartedAt}" | jq -rc "select( .name | tostring | contains(\"ipv6nat-mailcow\"))" | jq -rc .StartedAt | xargs -n1 date +%s -d | sort | tail -n1)" + LATEST_STARTED="$(echo ${CONTAINERS} | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], StartedAt: .State.StartedAt}" | jq -rc "select( .name | tostring | contains(\"ipv6nat-mailcow\") | not)" | jq -rc .StartedAt | xargs -n1 date +%s -d | sort | tail -n1)" + LATEST_IPV6NAT="$(echo ${CONTAINERS} | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], StartedAt: .State.StartedAt}" | jq -rc "select( .name | tostring | contains(\"ipv6nat-mailcow\"))" | jq -rc .StartedAt | xargs -n1 date +%s -d | sort | tail -n1)" DIFFERENCE_START_TIME=$(expr ${LATEST_IPV6NAT} - ${LATEST_STARTED} 2>/dev/null) if [[ "${DIFFERENCE_START_TIME}" -lt 30 ]]; then err_count=$(( ${err_count} + 1 )) @@ -372,15 +494,16 @@ ipv6nat_checks() { progress "IPv6 NAT" ${THRESHOLD} $(( ${THRESHOLD} - ${err_count} )) ${diff_c} if [[ $? == 10 ]]; then diff_c=0 - sleep 1 + sleep 30 else diff_c=0 - sleep 3600 + sleep 300 fi done return 1 } + rspamd_checks() { err_count=0 diff_c=0 @@ -388,15 +511,14 @@ rspamd_checks() { # Reduce error count by 2 after restarting an unhealthy container trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 while [ ${err_count} -lt ${THRESHOLD} ]; do - cat /dev/null > /tmp/rspamd-mailcow + touch /tmp/rspamd-mailcow; echo "$(tail -50 /tmp/rspamd-mailcow)" > /tmp/rspamd-mailcow host_ip=$(get_container_ip rspamd-mailcow) err_c_cur=${err_count} - SCORE=$(/usr/bin/curl -s --data-binary @- --unix-socket /var/lib/rspamd/rspamd.sock http://rspamd/scan -d ' -To: null@localhost + SCORE=$(echo 'To: null@localhost From: watchdog@localhost Empty -' | jq -rc .required_score) +' | usr/bin/curl -s --data-binary @- --unix-socket /var/lib/rspamd/rspamd.sock http://rspamd/scan | jq -rc .required_score) if [[ ${SCORE} != "9999" ]]; then echo "Rspamd settings check failed" 2>> /tmp/rspamd-mailcow 1>&2 err_count=$(( ${err_count} + 1)) @@ -406,13 +528,49 @@ Empty [ ${err_c_cur} -eq ${err_count} ] && [ ! $((${err_count} - 1)) -lt 0 ] && err_count=$((${err_count} - 1)) diff_c=1 [ ${err_c_cur} -ne ${err_count} ] && diff_c=$(( ${err_c_cur} - ${err_count} )) progress "Rspamd" ${THRESHOLD} $(( ${THRESHOLD} - ${err_count} )) ${diff_c} - diff_c=0 - sleep $(( ( RANDOM % 30 ) + 10 )) + if [[ $? == 10 ]]; then + diff_c=0 + sleep 1 + else + diff_c=0 + sleep $(( ( RANDOM % 60 ) + 20 )) + fi done return 1 } +olefy_checks() { + err_count=0 + diff_c=0 + THRESHOLD=5 + # Reduce error count by 2 after restarting an unhealthy container + trap "[ ${err_count} -gt 1 ] && err_count=$(( ${err_count} - 2 ))" USR1 + while [ ${err_count} -lt ${THRESHOLD} ]; do + touch /tmp/olefy-mailcow; echo "$(tail -50 /tmp/olefy-mailcow)" > /tmp/olefy-mailcow + host_ip=$(get_container_ip olefy-mailcow) + err_c_cur=${err_count} + /usr/lib/nagios/plugins/check_tcp -4 -H ${host_ip} -p 10055 2>> /tmp/olefy-mailcow 1>&2; err_count=$(( ${err_count} + $? 
)) + [ ${err_c_cur} -eq ${err_count} ] && [ ! $((${err_count} - 1)) -lt 0 ] && err_count=$((${err_count} - 1)) diff_c=1 + [ ${err_c_cur} -ne ${err_count} ] && diff_c=$(( ${err_c_cur} - ${err_count} )) + progress "Olefy" ${THRESHOLD} $(( ${THRESHOLD} - ${err_count} )) ${diff_c} + if [[ $? == 10 ]]; then + diff_c=0 + sleep 1 + else + diff_c=0 + sleep $(( ( RANDOM % 60 ) + 20 )) + fi + done + return 1 +} + +# Notify about start +if [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]]; then + mail_error "watchdog-mailcow" "Watchdog started monitoring mailcow." +fi + # Create watchdog agents + ( while true; do if ! nginx_checks; then @@ -421,7 +579,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned nginx_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -431,7 +591,21 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned mysql_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) + +( +while true; do + if ! redis_checks; then + log_msg "Redis hit error limit" + echo redis-mailcow > /tmp/com_pipe + fi +done +) & +PID=$! +echo "Spawned redis_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -441,7 +615,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned phpfpm_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -451,7 +627,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned sogo_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) if [ ${CHECK_UNBOUND} -eq 1 ]; then ( @@ -462,7 +640,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned unbound_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) fi if [[ "${SKIP_CLAMD}" =~ ^([nN][oO]|[nN])+$ ]]; then @@ -474,7 +654,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned clamd_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) fi ( @@ -485,7 +667,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned postfix_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -495,7 +679,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned dovecot_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -505,7 +691,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned rspamd_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -515,7 +703,45 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! +echo "Spawned ratelimit_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) + +( +while true; do + if ! fail2ban_checks; then + log_msg "Fail2ban hit error limit" + echo fail2ban > /tmp/com_pipe + fi +done +) & +PID=$! +echo "Spawned fail2ban_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) + +#( +#while true; do +# if ! olefy_checks; then +# log_msg "Olefy hit error limit" +# echo olefy-mailcow > /tmp/com_pipe +# fi +#done +#) & +#PID=$! +#echo "Spawned olefy_checks with PID ${PID}" +#BACKGROUND_TASKS+=(${PID}) + +( +while true; do + if ! acme_checks; then + log_msg "ACME client hit error limit" + echo acme-mailcow > /tmp/com_pipe + fi +done +) & +PID=$! +echo "Spawned acme_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) ( while true; do @@ -525,7 +751,9 @@ while true; do fi done ) & -BACKGROUND_TASKS+=($!) +PID=$! 
+echo "Spawned ipv6nat_checks with PID ${PID}" +BACKGROUND_TASKS+=(${PID}) # Monitor watchdog agents, stop script when agents fails and wait for respawn by Docker (restart:always:n) ( @@ -556,25 +784,43 @@ while true; do done ) & -# Restart container when threshold limit reached +# Actions when threshold limit is reached while true; do CONTAINER_ID= HAS_INITDB= read com_pipe_answer /dev/null)) + if [[ ! -z "${F2B_RES}" ]]; then + redis-cli -h redis-mailcow DEL F2B_RES > /dev/null + host= + for host in "${F2B_RES[@]}"; do + log_msg "Banned ${host}" + rm /tmp/fail2ban 2> /dev/null + timeout 2s whois "${host}" > /tmp/fail2ban + [[ ! -z ${WATCHDOG_NOTIFY_EMAIL} ]] && [[ ${WATCHDOG_NOTIFY_BAN} =~ ^([yY][eE][sS]|[yY])+$ ]] && mail_error "${com_pipe_answer}" "IP ban: ${host}" + done + fi + elif [[ ${com_pipe_answer} =~ .+-mailcow ]]; then kill -STOP ${BACKGROUND_TASKS[*]} - sleep 3 + sleep 10 CONTAINER_ID=$(curl --silent --insecure https://dockerapi/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"${com_pipe_answer}\")) | .id") if [[ ! -z ${CONTAINER_ID} ]]; then if [[ "${com_pipe_answer}" == "php-fpm-mailcow" ]]; then HAS_INITDB=$(curl --silent --insecure -XPOST https://dockerapi/containers/${CONTAINER_ID}/top | jq '.msg.Processes[] | contains(["php -c /usr/local/etc/php -f /web/inc/init_db.inc.php"])' | grep true) fi S_RUNNING=$(($(date +%s) - $(curl --silent --insecure https://dockerapi/containers/${CONTAINER_ID}/json | jq .State.StartedAt | xargs -n1 date +%s -d))) - if [ ${S_RUNNING} -lt 120 ]; then - log_msg "Container is running for less than 120 seconds, skipping action..." + if [ ${S_RUNNING} -lt 360 ]; then + log_msg "Container is running for less than 360 seconds, skipping action..." elif [[ ! -z ${HAS_INITDB} ]]; then log_msg "Database is being initialized by php-fpm-mailcow, not restarting but delaying checks for a minute..." 
sleep 60 @@ -589,6 +835,7 @@ while true; do fi fi kill -CONT ${BACKGROUND_TASKS[*]} + sleep 1 kill -USR1 ${BACKGROUND_TASKS[*]} fi done diff --git a/data/assets/nextcloud/nextcloud.conf b/data/assets/nextcloud/nextcloud.conf index cf90a32b..243cc406 100644 --- a/data/assets/nextcloud/nextcloud.conf +++ b/data/assets/nextcloud/nextcloud.conf @@ -75,7 +75,7 @@ server { deny all; } - location ~ ^/(?:index|remote|public|cron|core/ajax/update|status|ocs/v[12]|updater/.+|ocs-provider/.+)\.php(?:$|/) { + location ~ ^/(?:index|remote|public|cron|core/ajax/update|status|ocs/v[12]|updater/.+|oc[ms]-provider/.+)\.php(?:$|/) { fastcgi_split_path_info ^(.+\.php)(/.*)$; include fastcgi_params; fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; @@ -90,12 +90,12 @@ server { fastcgi_read_timeout 1200; } - location ~ ^/(?:updater|ocs-provider)(?:$|/) { + location ~ ^/(?:updater|oc[ms]-provider)(?:$|/) { try_files $uri/ =404; index index.php; } - location ~ \.(?:css|js|woff|svg|gif)$ { + location ~ \.(?:css|js|woff2?|svg|gif)$ { try_files $uri /index.php$uri$is_args$args; add_header Cache-Control "public, max-age=15778463"; add_header X-Content-Type-Options nosniff; diff --git a/data/assets/nextcloud/occ b/data/assets/nextcloud/occ index 2ae08001..5113ac01 100755 --- a/data/assets/nextcloud/occ +++ b/data/assets/nextcloud/occ @@ -1,2 +1,2 @@ #!/bin/bash -docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) /web/nextcloud/occ ${@} +docker exec -it -u www-data $(docker ps -f name=php-fpm-mailcow -q) php /web/nextcloud/occ ${@} diff --git a/data/assets/nextcloud/site.nextcloud.custom b/data/assets/nextcloud/site.nextcloud.custom deleted file mode 100644 index 6ac29902..00000000 --- a/data/assets/nextcloud/site.nextcloud.custom +++ /dev/null @@ -1,44 +0,0 @@ - location ^~ /nextcloud { - location /nextcloud { - rewrite ^ /nextcloud/index.php$uri; - } - location ~ ^/nextcloud/(?:build|tests|config|lib|3rdparty|templates|data)/ { - deny all; - } - location ~ ^/nextcloud/(?:\.|autotest|occ|issue|indie|db_|console) { - deny all; - } - location ~ ^/nextcloud/(?:index|remote|public|cron|core/ajax/update|status|ocs/v[12]|updater/.+|ocs-provider/.+)\.php(?:$|/) { - fastcgi_split_path_info ^(.+\.php)(/.*)$; - include fastcgi_params; - fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name; - fastcgi_param PATH_INFO $fastcgi_path_info; - fastcgi_param HTTPS on; - fastcgi_param modHeadersAvailable true; - fastcgi_param front_controller_active true; - fastcgi_pass phpfpm:9002; - fastcgi_intercept_errors on; - fastcgi_request_buffering off; - client_max_body_size 0; - fastcgi_read_timeout 1200; - } - location ~ ^/nextcloud/(?:updater|ocs-provider)(?:$|/) { - try_files $uri/ =404; - index index.php; - } - location ~ \.(?:css|js|woff|svg|gif)$ { - try_files $uri /nextcloud/index.php$uri$is_args$args; - add_header Cache-Control "public, max-age=15778463"; - add_header X-Content-Type-Options nosniff; - add_header X-XSS-Protection "1; mode=block"; - add_header X-Robots-Tag none; - add_header X-Download-Options noopen; - add_header X-Permitted-Cross-Domain-Policies none; - add_header X-Frame-Options "SAMEORIGIN"; - access_log off; - } - location ~ \.(?:png|html|ttf|ico|jpg|jpeg)$ { - try_files $uri /nextcloud/index.php$uri$is_args$args; - access_log off; - } - } diff --git a/data/conf/dovecot/dovecot.conf b/data/conf/dovecot/dovecot.conf index d693d39c..51e58710 100644 --- a/data/conf/dovecot/dovecot.conf +++ b/data/conf/dovecot/dovecot.conf @@ -3,7 +3,7 @@ # 
-------------------------------------------------------------------------- # LDAP example: #passdb { -# args = /usr/local/etc/dovecot/ldap/passdb.conf +# args = /etc/dovecot/ldap/passdb.conf # driver = ldap #} @@ -20,7 +20,7 @@ disable_plaintext_auth = yes login_log_format_elements = "user=<%u> method=%m rip=%r lip=%l mpid=%e %c %k" mail_home = /var/vmail/%d/%n mail_location = maildir:~/ -mail_plugins = user sieve_before (mailcow UI) -> user sieve_after (mailcow UI) -> global_sieve_after + require "fileinto"; require "mailbox"; require "variables"; diff --git a/data/conf/dovecot/global_sieve_before b/data/conf/dovecot/global_sieve_before new file mode 100644 index 00000000..e6a523d4 --- /dev/null +++ b/data/conf/dovecot/global_sieve_before @@ -0,0 +1,2 @@ +# global_sieve_before script +# global_sieve_before -> user sieve_before (mailcow UI) -> user sieve_after (mailcow UI) -> global_sieve_after diff --git a/data/conf/nginx/site.conf b/data/conf/nginx/site.conf index 8b8959d5..ccb94fa0 100644 --- a/data/conf/nginx/site.conf +++ b/data/conf/nginx/site.conf @@ -34,6 +34,7 @@ server { client_max_body_size 0; + listen 127.0.0.1:65510; include /etc/nginx/conf.d/listen_plain.active; include /etc/nginx/conf.d/listen_ssl.active; include /etc/nginx/conf.d/server_name.active; @@ -142,7 +143,19 @@ server { try_files /autoconfig.php =404; } + # auth_request endpoint if ALLOW_ADMIN_EMAIL_LOGIN is set + location /sogo-auth-verify { + internal; + proxy_set_header X-Original-URI $request_uri; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header Host $http_host; + proxy_set_header Content-Length ""; + proxy_pass http://127.0.0.1:65510/sogo-auth; + proxy_pass_request_body off; + } + location ^~ /Microsoft-Server-ActiveSync { + include /etc/nginx/conf.d/sogo_proxy_auth.active; include /etc/nginx/conf.d/sogo_eas.active; proxy_connect_timeout 4000; proxy_next_upstream timeout error; @@ -165,6 +178,7 @@ server { } location ^~ /SOGo { + include /etc/nginx/conf.d/sogo_proxy_auth.active; include /etc/nginx/conf.d/sogo.active; proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; diff --git a/data/conf/nginx/templates/sogo.auth_request.template.sh b/data/conf/nginx/templates/sogo.auth_request.template.sh new file mode 100644 index 00000000..f6d2d98e --- /dev/null +++ b/data/conf/nginx/templates/sogo.auth_request.template.sh @@ -0,0 +1,10 @@ +if printf "%s\n" "${ALLOW_ADMIN_EMAIL_LOGIN}" | grep -E '^([yY][eE][sS]|[yY])+$' >/dev/null; then + echo 'auth_request /sogo-auth-verify; +auth_request_set $user $upstream_http_x_user; +auth_request_set $auth $upstream_http_x_auth; +auth_request_set $auth_type $upstream_http_x_auth_type; +proxy_set_header x-webobjects-remote-user "$user"; +proxy_set_header Authorization "$auth"; +proxy_set_header x-webobjects-auth-type "$auth_type"; +' +fi diff --git a/data/conf/phpfpm/sogo-sso/.gitkeep b/data/conf/phpfpm/sogo-sso/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/data/conf/postfix/anonymize_headers.pcre b/data/conf/postfix/anonymize_headers.pcre index 099094d9..7842b9e0 100644 --- a/data/conf/postfix/anonymize_headers.pcre +++ b/data/conf/postfix/anonymize_headers.pcre @@ -1,8 +1,11 @@ if /^\s*Received:.*Authenticated sender.*\(Postcow\)/ -/^\s*Received:.*Authenticated sender:(.+)/ - REPLACE Received: from localhost (localhost [127.0.0.1]) (Authenticated sender:$1 +#/^Received: from .*? \([\w-.]* \[.*?\]\)\s+\(Authenticated sender: (.+)\)\s+by.+\(Postcow\) with (E?SMTPS?A?) 
id ([A-F0-9]+).+;.*?/ +/^Received: from .*? \([\w-.]* \[.*?\]\)(.*|\n.*)\(Authenticated sender: (.+)\)\s+by.+\(Postcow\) with (.*)/ + REPLACE Received: from [127.0.0.1] (localhost [127.0.0.1]) by localhost (Mailerdaemon) with $2 endif /^\s*X-Enigmail/ IGNORE /^\s*X-Mailer/ IGNORE /^\s*X-Originating-IP/ IGNORE /^\s*X-Forward/ IGNORE +# Not removing UA by default, might be signed +#/^\s*User-Agent/ IGNORE diff --git a/data/conf/postfix/local_transport b/data/conf/postfix/local_transport new file mode 100644 index 00000000..6dd21011 --- /dev/null +++ b/data/conf/postfix/local_transport @@ -0,0 +1,2 @@ +/watchdog@localhost$/ watchdog_discard: +/localhost$/ local: diff --git a/data/conf/postfix/main.cf b/data/conf/postfix/main.cf index 83a252d8..50fcdfa8 100644 --- a/data/conf/postfix/main.cf +++ b/data/conf/postfix/main.cf @@ -1,3 +1,6 @@ +# -------------------------------------------------------------------------- +# Please create a file "extra.cf" for persistent overrides to main.cf +# -------------------------------------------------------------------------- biff = no append_dot_mydomain = no smtpd_tls_cert_file = /etc/ssl/mail/cert.pem @@ -6,7 +9,10 @@ smtpd_use_tls=yes smtpd_tls_received_header = yes smtpd_tls_session_cache_database = btree:${data_directory}/smtpd_scache smtp_tls_session_cache_database = btree:${data_directory}/smtp_scache -smtpd_relay_restrictions = permit_mynetworks permit_sasl_authenticated defer_unauth_destination +smtpd_relay_restrictions = permit_mynetworks, + permit_sasl_authenticated, + defer_unauth_destination +# alias maps are auto-generated in postfix.sh on startup alias_maps = hash:/etc/aliases alias_database = hash:/etc/aliases relayhost = @@ -19,20 +25,53 @@ bounce_queue_lifetime = 1d broken_sasl_auth_clients = yes disable_vrfy_command = yes maximal_backoff_time = 1800s -maximal_queue_lifetime = 1d +maximal_queue_lifetime = 5d +delay_warning_time = 4h message_size_limit = 104857600 milter_default_action = accept milter_protocol = 6 minimal_backoff_time = 300s plaintext_reject_code = 550 -postscreen_access_list = permit_mynetworks, cidr:/opt/postfix/conf/postscreen_access.cidr, tcp:127.0.0.1:10027 +postscreen_access_list = permit_mynetworks, + cidr:/opt/postfix/conf/postscreen_access.cidr, + tcp:127.0.0.1:10027 postscreen_bare_newline_enable = no postscreen_blacklist_action = drop postscreen_cache_cleanup_interval = 24h postscreen_cache_map = proxy:btree:$data_directory/postscreen_cache postscreen_dnsbl_action = enforce -postscreen_dnsbl_sites = b.barracudacentral.org=127.0.0.2*7 dnsbl.inps.de=127.0.0.2*7 bl.mailspike.net=127.0.0.2*5 bl.mailspike.net=127.0.0.[10;11;12]*4 dnsbl.sorbs.net=127.0.0.10*8 dnsbl.sorbs.net=127.0.0.5*6 dnsbl.sorbs.net=127.0.0.7*3 dnsbl.sorbs.net=127.0.0.8*2 dnsbl.sorbs.net=127.0.0.6*2 dnsbl.sorbs.net=127.0.0.9*2 zen.spamhaus.org=127.0.0.[10;11]*8 zen.spamhaus.org=127.0.0.[4..7]*6 zen.spamhaus.org=127.0.0.3*4 zen.spamhaus.org=127.0.0.2*3 hostkarma.junkemailfilter.com=127.0.0.2*3 hostkarma.junkemailfilter.com=127.0.0.4*1 hostkarma.junkemailfilter.com=127.0.1.2*1 wl.mailspike.net=127.0.0.[18;19;20]*-2 hostkarma.junkemailfilter.com=127.0.0.1*-2 -postscreen_dnsbl_threshold = 8 +postscreen_dnsbl_sites = wl.mailspike.net=127.0.0.[18;19;20]*-2 + hostkarma.junkemailfilter.com=127.0.0.1*-2 + list.dnswl.org=127.0.[0..255].0*-2 + list.dnswl.org=127.0.[0..255].1*-4 + list.dnswl.org=127.0.[0..255].2*-6 + list.dnswl.org=127.0.[0..255].3*-8 + ix.dnsbl.manitu.net*2 + bl.spamcop.net*2 + hostkarma.junkemailfilter.com=127.0.0.2*4 + 
hostkarma.junkemailfilter.com=127.0.0.3*2 + hostkarma.junkemailfilter.com=127.0.0.4*3 + hostkarma.junkemailfilter.com=127.0.1.2*1 + backscatter.spameatingmonkey.net*2 + bl.ipv6.spameatingmonkey.net*2 + bl.spameatingmonkey.net*2 + b.barracudacentral.org=127.0.0.2*7 + bl.mailspike.net=127.0.0.2*5 + bl.mailspike.net=127.0.0.[10;11;12]*4 + dnsbl.sorbs.net=127.0.0.10*8 + dnsbl.sorbs.net=127.0.0.5*6 + dnsbl.sorbs.net=127.0.0.7*3 + dnsbl.sorbs.net=127.0.0.8*2 + dnsbl.sorbs.net=127.0.0.6*2 + dnsbl.sorbs.net=127.0.0.9*2 + zen.spamhaus.org=127.0.0.[10;11]*8 + zen.spamhaus.org=127.0.0.[4..7]*6 + zen.spamhaus.org=127.0.0.3*4 + zen.spamhaus.org=127.0.0.2*3 + hostkarma.junkemailfilter.com=127.0.0.2*3 + hostkarma.junkemailfilter.com=127.0.0.4*2 + hostkarma.junkemailfilter.com=127.0.1.2*1 +postscreen_dnsbl_threshold = 5 postscreen_dnsbl_ttl = 5m postscreen_greet_action = enforce postscreen_greet_banner = $smtpd_banner @@ -40,16 +79,10 @@ postscreen_greet_ttl = 2d postscreen_greet_wait = 3s postscreen_non_smtp_command_enable = no postscreen_pipelining_enable = no -proxy_read_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_sender_acl.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_tls_policy_override_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_sender_dependent_default_transport_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_transport_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_sasl_passwd_maps_sender_dependent.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_sasl_passwd_maps_transport_maps.cf, +proxy_read_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_sasl_passwd_maps_transport_maps.cf, proxy:mysql:/opt/postfix/conf/sql/mysql_tls_enforce_in_policy.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_sender_bcc_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_recipient_bcc_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_recipient_canonical_maps.cf, + $sender_dependent_default_transport_maps, + $smtp_tls_policy_maps, $local_recipient_maps, $mydestination, $virtual_alias_maps, @@ -60,11 +93,14 @@ proxy_read_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_sender_acl.cf, $relay_domains, $canonical_maps, $sender_canonical_maps, + $sender_bcc_maps, + $recipient_bcc_maps, $recipient_canonical_maps, $relocated_maps, $transport_maps, $mynetworks, - $smtpd_sender_login_maps + $smtpd_sender_login_maps, + $smtp_sasl_password_maps queue_run_delay = 300s relay_domains = proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_relay_domain_maps.cf relay_recipient_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_relay_recipient_maps.cf @@ -81,33 +117,47 @@ smtpd_error_sleep_time = 10s smtpd_hard_error_limit = ${stress?1}${stress:5} smtpd_helo_required = yes smtpd_proxy_timeout = 600s -smtpd_recipient_restrictions = permit_sasl_authenticated, permit_mynetworks, check_recipient_access proxy:mysql:/opt/postfix/conf/sql/mysql_tls_enforce_in_policy.cf, reject_invalid_helo_hostname, reject_unknown_reverse_client_hostname, reject_unauth_destination +smtpd_recipient_restrictions = permit_sasl_authenticated, + permit_mynetworks, + check_recipient_access proxy:mysql:/opt/postfix/conf/sql/mysql_tls_enforce_in_policy.cf, + reject_invalid_helo_hostname, + reject_unknown_reverse_client_hostname, + reject_unauth_destination smtpd_sasl_auth_enable = yes smtpd_sasl_authenticated_header = yes smtpd_sasl_path = inet:dovecot:10001 smtpd_sasl_type = dovecot smtpd_sender_login_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_sender_acl.cf -smtpd_sender_restrictions = reject_authenticated_sender_login_mismatch, 
permit_mynetworks, permit_sasl_authenticated, reject_unlisted_sender, reject_unknown_sender_domain +smtpd_sender_restrictions = reject_authenticated_sender_login_mismatch, + permit_mynetworks, + permit_sasl_authenticated, + reject_unlisted_sender, + reject_unknown_sender_domain smtpd_soft_error_limit = 3 smtpd_tls_auth_only = yes smtpd_tls_dh1024_param_file = /etc/ssl/mail/dhparams.pem smtpd_tls_eecdh_grade = auto smtpd_tls_exclude_ciphers = ECDHE-RSA-RC4-SHA, RC4, aNULL, DES-CBC3-SHA, ECDHE-RSA-DES-CBC3-SHA, EDH-RSA-DES-CBC3-SHA smtpd_tls_loglevel = 1 -smtp_tls_mandatory_protocols = !SSLv2, !SSLv3 + +# Mandatory protocols and ciphers are used when a connections is enforced to use TLS +# Does _not_ apply to enforced incoming TLS settings per mailbox +smtp_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1 +lmtp_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1 +smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1 +smtpd_tls_mandatory_ciphers = high + smtp_tls_protocols = !SSLv2, !SSLv3 -lmtp_tls_mandatory_protocols = !SSLv2, !SSLv3 -lmtp_tls_protocols = !SSLv2, !SSLv2, !SSLv3 -smtpd_tls_mandatory_protocols = !SSLv2, !SSLv3 +lmtp_tls_protocols = !SSLv2, !SSLv3, !TLSv1, !TLSv1.1 smtpd_tls_protocols = !SSLv2, !SSLv3 + smtpd_tls_security_level = may tls_preempt_cipherlist = yes tls_ssl_options = NO_COMPRESSION -smtpd_tls_mandatory_ciphers = high virtual_alias_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_alias_maps.cf, + proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_resource_maps.cf, proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_spamalias_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_alias_domain_maps.cf, - proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_alias_domain_catchall_maps.cf + proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_alias_domain_maps.cf virtual_gid_maps = static:5000 virtual_mailbox_base = /var/vmail/ virtual_mailbox_domains = proxy:mysql:/opt/postfix/conf/sql/mysql_virtual_domains_maps.cf @@ -123,7 +173,6 @@ smtpd_milters = inet:rspamd:9900 non_smtpd_milters = inet:rspamd:9900 milter_mail_macros = i {mail_addr} {client_addr} {client_name} {auth_authen} mydestination = localhost.localdomain, localhost -#content_filter=zeyple # Prefere IPv4, useful for v4-only envs smtp_address_preference = ipv4 smtp_sender_dependent_authentication = yes @@ -134,5 +183,14 @@ smtp_sasl_mechanism_filter = plain, login smtp_tls_policy_maps=proxy:mysql:/opt/postfix/conf/sql/mysql_tls_policy_override_maps.cf smtp_header_checks = pcre:/opt/postfix/conf/anonymize_headers.pcre mail_name = Postcow -transport_maps = proxy:mysql:/opt/postfix/conf/sql/mysql_transport_maps.cf +# local_transport map catches local destinations and prevents routing local dests when the next map would route "*" +transport_maps = pcre:/opt/postfix/conf/local_transport, + proxy:mysql:/opt/postfix/conf/sql/mysql_transport_maps.cf smtp_sasl_auth_soft_bounce = no +postscreen_discard_ehlo_keywords = silent-discard, dsn +compatibility_level = 2 +smtputf8_enable = no + +# DO NOT EDIT ANYTHING BELOW # +# User overrides # + diff --git a/data/conf/postfix/master.cf b/data/conf/postfix/master.cf index 79642f6d..b664bbd5 100644 --- a/data/conf/postfix/master.cf +++ b/data/conf/postfix/master.cf @@ -1,29 +1,47 @@ +# inter-mx with postscreen on 25/tcp smtp inet n - n - 1 postscreen smtpd pass - - n - - smtpd -o smtpd_helo_restrictions=permit_mynetworks,reject_non_fqdn_helo_hostname -o smtpd_sasl_auth_enable=no + -o 
smtpd_sender_restrictions=permit_mynetworks,reject_unlisted_sender,reject_unknown_sender_domain + +# smtpd tls-wrapped (smtps) on 465/tcp smtps inet n - n - - smtpd -o smtpd_tls_wrappermode=yes -o smtpd_client_restrictions=permit_mynetworks,permit_sasl_authenticated,reject + -o smtpd_tls_mandatory_protocols=!SSLv2,!SSLv3 -o tls_preempt_cipherlist=yes + +# smtpd with starttls on 587/tcp submission inet n - n - - smtpd -o smtpd_client_restrictions=permit_mynetworks,permit_sasl_authenticated,reject -o smtpd_enforce_tls=yes -o smtpd_tls_security_level=encrypt + -o smtpd_tls_mandatory_protocols=!SSLv2,!SSLv3 -o tls_preempt_cipherlist=yes + +# used by SOGo +# smtpd_sender_restrictions should match main.cf, but with check_sasl_access prepended for login-as-mailbox-user function 588 inet n - n - - smtpd -o smtpd_client_restrictions=permit_mynetworks,permit_sasl_authenticated,reject -o smtpd_tls_auth_only=no -o smtpd_sender_restrictions=check_sasl_access,regexp:/opt/postfix/conf/allow_mailcow_local.regexp,reject_authenticated_sender_login_mismatch,permit_mynetworks,permit_sasl_authenticated,reject_unlisted_sender,reject_unknown_sender_domain + +# used to reinject quarantine mails 590 inet n - n - - smtpd -o smtpd_client_restrictions=permit_mynetworks,reject -o smtpd_tls_auth_only=no -o smtpd_milters= -o non_smtpd_milters= + +# enforced smtp connector smtp_enforced_tls unix - - n - - smtp -o smtp_tls_security_level=encrypt -o syslog_name=enforced-tls-smtp -o smtp_delivery_status_filter=pcre:/opt/postfix/conf/smtp_dsn_filter + +# smtp connector used, when a transport map matched +# this helps to have different sasl maps than we have with sender dependent transport maps smtp_via_transport_maps unix - - n - - smtp -o smtp_sasl_password_maps=proxy:mysql:/opt/postfix/conf/sql/mysql_sasl_passwd_maps_transport_maps.cf @@ -55,25 +73,12 @@ scache unix - - n - 1 scache maildrop unix - n n - - pipe flags=DRhu user=vmail argv=/usr/bin/maildrop -d ${recipient} -# start zeyple -zeyple unix - n n - - pipe - user=zeyple argv=/usr/local/bin/zeyple.py ${recipient} -127.0.0.1:10026 inet n - n - 10 smtpd - -o content_filter= - -o receive_override_options=no_unknown_recipient_checks,no_header_body_checks,no_milters - -o smtpd_helo_restrictions= - -o smtpd_client_restrictions= - -o smtpd_sender_restrictions= - -o smtpd_recipient_restrictions=permit_mynetworks,reject - -o mynetworks=127.0.0.0/8 - -o smtpd_authorized_xforward_hosts=127.0.0.0/8 -# end zeyple - # start whitelist_fwd 127.0.0.1:10027 inet n n n - 0 spawn user=nobody argv=/usr/local/bin/whitelist_forwardinghosts.sh # end whitelist_fwd # start watchdog-specific +# logs to local7 (hidden) 589 inet n - n - - smtpd -o smtpd_client_restrictions=permit_mynetworks,reject -o syslog_name=watchdog diff --git a/data/conf/rspamd/custom/bad_words.map b/data/conf/rspamd/custom/bad_words.map new file mode 100644 index 00000000..5f19d2a9 --- /dev/null +++ b/data/conf/rspamd/custom/bad_words.map @@ -0,0 +1,44 @@ +/\ssex\s/i +/\svagina\s/i +/\serotic\s/i +/\serection\s/i +/\ssexy\s/i +/\spenis\s/i +/\sass\s/i +/\sviagra\s/i +/\stits\s/i +/\stitty\s/i +/\stitties\s/i +/\scum\s/i +/\ssperm\s/i +/\sslut\s/i +/\sporn\s/i +/\scock\s/i +/\spharma\s/i +/\spharmacy\s/i +/\sseo\s/i +/\smarketing\s/i +/\sjackpot\s/i +/\slotto\s/i +/\slottery\s/i +/pillenversand/i +/\skredithilfe\s/i +/\skapital\s/i +/\skrankenversicherung\s/i +/bitcoin/i +/pädophil/i +/paedophil/i +/freiberufler/i +/unternehmer/i +/masturbieren/i +/trojaner/i +/malware/i +/\sscooter\s/i +/\sescooter\s/i +/\se-scooter\s/i 
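# How these entries behave (see multimap.conf and composites.conf further below):
# each line is a case-insensitive regular expression matched against the text
# parts of a message. The surrounding \s anchors an entry to a standalone word,
# so /\ssex\s/i does not fire on "sextant". A BAD_WORDS hit alone only adds 0.1;
# the heavy penalty (10.0) comes from the BAD_WORD_BAD_TLD composite, which
# additionally requires the sender domain to match fishy_tlds.map.
# Illustrative custom entry, not shipped upstream:
# /\scasino\s/i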
+/testost/i +/\spotenz\s/i +/potenzmittel/i +/rezeptfrei/i +/apotheke/i +/web\sdevelopment/i diff --git a/data/conf/rspamd/custom/fishy_tlds.map b/data/conf/rspamd/custom/fishy_tlds.map new file mode 100644 index 00000000..01c7c86d --- /dev/null +++ b/data/conf/rspamd/custom/fishy_tlds.map @@ -0,0 +1,66 @@ +/.+\.accountant$/i +/.+\.art$/i +/.+\.asia$/i +/.+\.bid$/i +/.+\.biz$/i +/.+\.care$/i +/.+\.cf$/i +/.+\.cl$/i +/.+\.click$/i +/.+\.cloud$/i +/.+\.co$/i +/.+\.construction$/i +/.+\.country$/i +/.+\.cricket$/i +/.+\.date$/i +/.+\.desi$/i +/.+\.download$/i +/.+\.estate$/i +/.+\.faith$/i +/.+\.fit$/i +/.+\.flights$/i +/.+\.ga$/i +/.+\.gdn$/i +/.+\.gq$/i +/.+\.guru$/i +/.+\.icu$/i +/.+\.id$/i +/.+\.info$/i +/.+\.in.net$/i +/.+\.ir$/i +/.+\.jetzt$/i +/.+\.kim$/i +/.+\.life$/i +/.+\.link$/i +/.+\.loan$/i +/.+\.mk$/i +/.+\.ml$/i +/.+\.ninja$/i +/.+\.online$/i +/.+\.ooo$/i +/.+\.party$/i +/.+\.pro$/i +/.+\.ps$/i +/.+\.pw$/i +/.+\.racing$/i +/.+\.review$/i +/.+\.rocks$/i +/.+\.ryukyu$/i +/.+\.science$/i +/.+\.site$/i +/.+\.space$/i +/.+\.stream$/i +/.+\.sucks$/i +/.+\.tk$/i +/.+\.top$/i +/.+\.topica\.com$/i +/.+\.town$/i +/.+\.trade$/i +/.+\.uno$/i +/.+\.vip$/i +/.+\.webcam$/i +/.+\.website$/i +/.+\.win$/i +/.+\.work$/i +/.+\.world$/i +/.+\.xyz$/i diff --git a/data/conf/rspamd/custom/ip_wl.map b/data/conf/rspamd/custom/ip_wl.map new file mode 100644 index 00000000..c8bb5529 --- /dev/null +++ b/data/conf/rspamd/custom/ip_wl.map @@ -0,0 +1,4 @@ +# IP whitelist +# 127.0.0.1 +# 1.2.3.4 +# ... diff --git a/data/conf/rspamd/dynmaps/settings.php b/data/conf/rspamd/dynmaps/settings.php index 4d78456e..0bf9f519 100644 --- a/data/conf/rspamd/dynmaps/settings.php +++ b/data/conf/rspamd/dynmaps/settings.php @@ -6,6 +6,8 @@ then any of these will trigger the rule. If a rule is triggered then no more rul */ header('Content-Type: text/plain'); require_once "vars.inc.php"; +// Getting headers sent by the client. +//$headers = apache_request_headers(); ini_set('error_reporting', 0); @@ -25,6 +27,23 @@ catch (PDOException $e) { exit; } +// Check if db changed and return header +/*$stmt = $pdo->prepare("SELECT UNIX_TIMESTAMP(UPDATE_TIME) AS `db_update_time` FROM information_schema.tables + WHERE `TABLE_NAME` = 'filterconf' + AND TABLE_SCHEMA = :dbname;"); +$stmt->execute(array( + ':dbname' => $database_name +)); +$db_update_time = $stmt->fetch(PDO::FETCH_ASSOC)['db_update_time']; + +if (isset($headers['If-Modified-Since']) && (strtotime($headers['If-Modified-Since']) == $db_update_time)) { + header('Last-Modified: '.gmdate('D, d M Y H:i:s', $db_update_time).' GMT', true, 304); + exit; +} else { + header('Last-Modified: '.gmdate('D, d M Y H:i:s', $db_update_time).' GMT', true, 200); +} +*/ + function parse_email($email) { if (!filter_var($email, FILTER_VALIDATE_EMAIL)) return false; $a = strrpos($email, '@'); @@ -43,7 +62,9 @@ function wl_by_sogo() { if (!filter_var($contact, FILTER_VALIDATE_EMAIL)) { continue; } - $rcpt[$row['user']][] = '/^' . str_replace('/', '\/', $contact) . '$/i'; + // Explicit from, no mime_from, no regex - envelope must match + // mailcow white and blacklists also cover mime_from + $rcpt[$row['user']][] = str_replace('/', '\/', $contact); } } return $rcpt; @@ -67,7 +88,7 @@ function ucl_rcpts($object, $type) { if (!empty($local) && !empty($domain)) { $rcpt[] = '/^' . str_replace('/', '\/', $local) . '[+].*' . str_replace('/', '\/', $domain) . '$/i'; } - $rcpt[] = '/^' . str_replace('/', '\/', $row['address']) . 
'$/i'; + $rcpt[] = str_replace('/', '\/', $row['address']); } // Aliases by alias domains $stmt = $pdo->prepare("SELECT CONCAT(`local_part`, '@', `alias_domain`.`alias_domain`) AS `alias` FROM `mailbox` @@ -85,7 +106,7 @@ function ucl_rcpts($object, $type) { if (!empty($local) && !empty($domain)) { $rcpt[] = '/^' . str_replace('/', '\/', $local) . '[+].*' . str_replace('/', '\/', $domain) . '$/i'; } - $rcpt[] = '/^' . str_replace('/', '\/', $row['alias']) . '$/i'; + $rcpt[] = str_replace('/', '\/', $row['alias']); } } } @@ -107,8 +128,8 @@ function ucl_rcpts($object, $type) { settings { watchdog { priority = 10; - rcpt = "/null@localhost/i"; - from = "/watchdog@localhost/i"; + rcpt_mime = "/null@localhost/i"; + from_mime = "/watchdog@localhost/i"; apply "default" { actions { reject = 9999.0; @@ -199,12 +220,13 @@ while ($row = array_shift($rows)) { ?> whitelist_ { prepare("SELECT `value` FROM `filterconf` WHERE `object`= :object AND `option` = 'whitelist_from'"); $stmt->execute(array(':object' => $row['object'])); $list_items = $stmt->fetchAll(PDO::FETCH_ASSOC); - while ($item = array_shift($list_items)) { + foreach ($list_items as $item) { ?> from = "//i"; { + whitelist_mime_ { prepare("SELECT `value` FROM `filterconf` - WHERE `object`= :object - AND `option` = 'whitelist_from'"); - $stmt->execute(array(':object' => $row['object'])); - $list_items = $stmt->fetchAll(PDO::FETCH_ASSOC); + foreach ($list_items as $item) { ?> - header = { + from_mime = "//i"; - "From" = "/()/i"; - } - priority = 5; @@ -297,13 +308,13 @@ while ($row = array_shift($rows)) { ?> blacklist_ { prepare("SELECT `value` FROM `filterconf` WHERE `object`= :object AND `option` = 'blacklist_from'"); $stmt->execute(array(':object' => $row['object'])); $list_items = $stmt->fetchAll(PDO::FETCH_ASSOC); - while ($item = array_shift($list_items)) { + foreach ($list_items as $item) { ?> from = "//i"; { prepare("SELECT `value` FROM `filterconf` - WHERE `object`= :object - AND `option` = 'blacklist_from'"); - $stmt->execute(array(':object' => $row['object'])); - $list_items = $stmt->fetchAll(PDO::FETCH_ASSOC); + foreach ($list_items as $item) { ?> - header = { + from_mime = "//i"; - "From" = "/()/i"; - } - priority = 5; diff --git a/data/conf/rspamd/local.d/arc.conf b/data/conf/rspamd/local.d/arc.conf index e8d95871..277e0cc0 100644 --- a/data/conf/rspamd/local.d/arc.conf +++ b/data/conf/rspamd/local.d/arc.conf @@ -28,3 +28,5 @@ use_redis = true; key_prefix = "DKIM_PRIV_KEYS"; # Selector map selector_prefix = "DKIM_SELECTORS"; +sign_inbound = true; +use_domain_sign_inbound = "recipient"; diff --git a/data/conf/rspamd/local.d/composites.conf b/data/conf/rspamd/local.d/composites.conf index d775c4f6..036737de 100644 --- a/data/conf/rspamd/local.d/composites.conf +++ b/data/conf/rspamd/local.d/composites.conf @@ -16,3 +16,17 @@ SOGO_CONTACT_EXCLUDE_FWD_HOST { SOGO_CONTACT_SPOOFED { expression = "(R_SPF_PERMFAIL | R_SPF_SOFTFAIL | R_SPF_FAIL) & ~SOGO_CONTACT"; } +SPOOFED_UNAUTH { + expression = "!MAILCOW_AUTH & !MAILCOW_WHITE & !R_SPF_ALLOW & !DMARC_POLICY_ALLOW & !ARC_ALLOW & !SIEVE_HOST & MAILCOW_DOMAIN_HEADER_FROM"; + score = 5.0; +} +# Only apply to inbound unauthed and not whitelisted +OLEFY_MACRO { + expression = "!MAILCOW_AUTH & !MAILCOW_WHITE & OLETOOLS"; + score = 20.0; + policy = "remove_weight"; +} +BAD_WORD_BAD_TLD { + expression = "FISHY_TLD & BAD_WORDS" + score = 10.0; +} diff --git a/data/conf/rspamd/local.d/external_services.conf b/data/conf/rspamd/local.d/external_services.conf new file mode 100644 index 
00000000..bed4d917 --- /dev/null +++ b/data/conf/rspamd/local.d/external_services.conf @@ -0,0 +1,7 @@ +oletools { + # default olefy settings + servers = "olefy:10055"; + # needs to be set explicitly for Rspamd < 1.9.5 + scan_mime_parts = true; + # mime-part regex matching in content-type or filename +} diff --git a/data/conf/rspamd/local.d/metadata_exporter.conf b/data/conf/rspamd/local.d/metadata_exporter.conf index afe5c7e1..eaf9a5b2 100644 --- a/data/conf/rspamd/local.d/metadata_exporter.conf +++ b/data/conf/rspamd/local.d/metadata_exporter.conf @@ -20,7 +20,7 @@ return function(task) if ratelimited then return true end - return + return false end EOD; } diff --git a/data/conf/rspamd/local.d/milter_headers.conf b/data/conf/rspamd/local.d/milter_headers.conf index 7f21b8e0..d8d40258 100644 --- a/data/conf/rspamd/local.d/milter_headers.conf +++ b/data/conf/rspamd/local.d/milter_headers.conf @@ -13,6 +13,7 @@ routines { authentication-results { header = "Authentication-Results"; remove = 1; + add_smtp_user = false; spf_symbols { pass = "R_SPF_ALLOW"; fail = "R_SPF_FAIL"; diff --git a/data/conf/rspamd/local.d/multimap.conf b/data/conf/rspamd/local.d/multimap.conf index 7752b813..c27370b0 100644 --- a/data/conf/rspamd/local.d/multimap.conf +++ b/data/conf/rspamd/local.d/multimap.conf @@ -83,3 +83,39 @@ GLOBAL_RCPT_BL { prefilter = true; action = "reject"; } + +SIEVE_HOST { + type = "ip"; + map = "$LOCAL_CONFDIR/custom/dovecot_trusted.map"; + symbols_set = ["SIEVE_HOST"]; +} + +MAILCOW_DOMAIN_HEADER_FROM { + type = "header"; + header = "from"; + filter = "email:domain"; + map = "redis://DOMAIN_MAP"; +} + +IP_WHITELIST { + type = "ip"; + map = "$LOCAL_CONFDIR/custom/ip_wl.map"; + prefilter = "true"; + action = "accept"; +} + +FISHY_TLD { + type = "from"; + filter = "email:domain"; + map = "${LOCAL_CONFDIR}/custom/fishy_tlds.map"; + regexp = true; + score = 0.1; +} + +BAD_WORDS { + type = "content"; + filter = "text"; + map = "${LOCAL_CONFDIR}/custom/bad_words.map"; + regexp = true; + score = 0.1; +} diff --git a/data/conf/rspamd/local.d/policies_group.conf b/data/conf/rspamd/local.d/policies_group.conf index f1c2b9f1..7f128fec 100644 --- a/data/conf/rspamd/local.d/policies_group.conf +++ b/data/conf/rspamd/local.d/policies_group.conf @@ -11,7 +11,13 @@ symbols = { "R_DKIM_REJECT" { score = 10.0; } - "R_DKIM_PERMFAIL" { - score = 10.0; + "DMARC_POLICY_REJECT" { + weight = 20.0; + } + "DMARC_POLICY_QUARANTINE" { + weight = 10.0; + } + "DMARC_POLICY_SOFTFAIL" { + weight = 2.0; } } diff --git a/data/conf/rspamd/local.d/rbl.conf b/data/conf/rspamd/local.d/rbl.conf new file mode 100644 index 00000000..3936cbbf --- /dev/null +++ b/data/conf/rspamd/local.d/rbl.conf @@ -0,0 +1,10 @@ +rbls { + uceprotect1 { + symbol = "RBL_UCEPROTECT_LEVEL1"; + rbl = "dnsbl-1.uceprotect.net"; + } + uceprotect2 { + symbol = "RBL_UCEPROTECT_LEVEL2"; + rbl = "dnsbl-2.uceprotect.net"; + } +} diff --git a/data/conf/rspamd/local.d/rbl_group.conf b/data/conf/rspamd/local.d/rbl_group.conf new file mode 100644 index 00000000..86c4e023 --- /dev/null +++ b/data/conf/rspamd/local.d/rbl_group.conf @@ -0,0 +1,8 @@ +symbols = { + "RBL_UCEPROTECT_LEVEL1" { + score = 3.5; + } + "RBL_UCEPROTECT_LEVEL2" { + score = 1.5; + } +} diff --git a/data/conf/rspamd/local.d/rspamd.conf.local b/data/conf/rspamd/local.d/rspamd.conf.local deleted file mode 100644 index 0662c47d..00000000 --- a/data/conf/rspamd/local.d/rspamd.conf.local +++ /dev/null @@ -1,16 +0,0 @@ -# rspamd.conf.local - -worker "fuzzy" { - # Socket to listen on (UDP and TCP from 
rspamd 1.3) - bind_socket = "*:11445"; - allow_update = ["127.0.0.1", "::1"]; - # Number of processes to serve this storage (useful for read scaling) - count = 2; - # Backend ("sqlite" or "redis" - default "sqlite") - backend = "redis"; - # Hashes storage time (3 months) - expire = 90d; - # Synchronize updates to the storage each minute - sync = 1min; -} - diff --git a/data/conf/rspamd/local.d/spamassassin.conf b/data/conf/rspamd/local.d/spamassassin.conf new file mode 100644 index 00000000..d091af63 --- /dev/null +++ b/data/conf/rspamd/local.d/spamassassin.conf @@ -0,0 +1 @@ +ruleset = "/etc/rspamd/custom/sa-rules"; diff --git a/data/conf/rspamd/local.d/statistics_group.conf b/data/conf/rspamd/local.d/statistics_group.conf index 160c65fa..49c952bd 100644 --- a/data/conf/rspamd/local.d/statistics_group.conf +++ b/data/conf/rspamd/local.d/statistics_group.conf @@ -1,10 +1,10 @@ symbols = { "BAYES_SPAM" { - weight = 8.5; + weight = 2.5; description = "Message probably spam, probability: "; } "BAYES_HAM" { - weight = -12.5; + weight = -10.5; description = "Message probably ham, probability: "; } } diff --git a/data/conf/rspamd/meta_exporter/pipe.php b/data/conf/rspamd/meta_exporter/pipe.php index 3e29d207..31f6037f 100644 --- a/data/conf/rspamd/meta_exporter/pipe.php +++ b/data/conf/rspamd/meta_exporter/pipe.php @@ -84,6 +84,9 @@ $rcpt_final_mailboxes = array(); // Loop through all rcpts foreach (json_decode($rcpts, true) as $rcpt) { + // Remove tag + $rcpt = preg_replace('/^(.*?)\+.*(@.*)$/', '$1$2', $rcpt); + // Break rcpt into local part and domain part $parsed_rcpt = parse_email($rcpt); @@ -128,6 +131,14 @@ foreach (json_decode($rcpts, true) as $rcpt) { )); $gotos = $stmt->fetch(PDO::FETCH_ASSOC)['goto']; } + if (empty($gotos)) { + $stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :rcpt AND `active` = '1'"); + $stmt->execute(array(':rcpt' => $parsed_rcpt['domain'])); + $goto_branch = $stmt->fetch(PDO::FETCH_ASSOC)['target_domain']; + if ($goto_branch) { + $gotos = $parsed_rcpt['local'] . '@' . $goto_branch; + } + } $gotos_array = explode(',', $gotos); $loop_c = 0; @@ -156,8 +167,18 @@ foreach (json_decode($rcpts, true) as $rcpt) { $stmt = $pdo->prepare("SELECT `goto` FROM `alias` WHERE `address` = :goto AND `active` = '1'"); $stmt->execute(array(':goto' => $goto)); $goto_branch = $stmt->fetch(PDO::FETCH_ASSOC)['goto']; - error_log("QUARANTINE: quarantine pipe: goto address " . $goto . " is a alias branch for " . $goto_branch); - $goto_branch_array = explode(',', $goto_branch); + if ($goto_branch) { + error_log("QUARANTINE: quarantine pipe: goto address " . $goto . " is a alias branch for " . $goto_branch); + $goto_branch_array = explode(',', $goto_branch); + } else { + $stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` AND '1'"); + $stmt->execute(array(':domain' => $parsed_goto['domain'])); + $goto_branch = $stmt->fetch(PDO::FETCH_ASSOC)['target_domain']; + if ($goto_branch) { + error_log("QUARANTINE: quarantine pipe: goto domain " . $parsed_gto['domain'] . " is a domain alias branch for " . $goto_branch); + $goto_branch_array = array($parsed_gto['local'] . '@' . 
$goto_branch); + } + } } } // goto item was processed, unset diff --git a/data/conf/rspamd/override.d/ratelimit.conf b/data/conf/rspamd/override.d/ratelimit.conf index ccd083d4..f02d2d3c 100644 --- a/data/conf/rspamd/override.d/ratelimit.conf +++ b/data/conf/rspamd/override.d/ratelimit.conf @@ -1,8 +1,8 @@ rates { # Format: "1 / 1h" or "20 / 1m" etc. - global ratelimits are disabled by default - to = "45 / 1m"; - to_ip = "360 / 1m"; - to_ip_from = "180 / 1m"; + to = "100 / 1s"; + to_ip = "100 / 1s"; + to_ip_from = "100 / 1s"; bounce_to = "100 / 1s"; bounce_to_ip = "100 / 1s"; } diff --git a/data/conf/rspamd/override.d/worker-controller-password.inc b/data/conf/rspamd/override.d/worker-controller-password.inc deleted file mode 100644 index 9a5984d1..00000000 --- a/data/conf/rspamd/override.d/worker-controller-password.inc +++ /dev/null @@ -1 +0,0 @@ -# Placeholder diff --git a/data/conf/rspamd/override.d/worker-fuzzy.inc b/data/conf/rspamd/override.d/worker-fuzzy.inc new file mode 100644 index 00000000..09b39c93 --- /dev/null +++ b/data/conf/rspamd/override.d/worker-fuzzy.inc @@ -0,0 +1,12 @@ +# Socket to listen on (UDP and TCP from rspamd 1.3) +bind_socket = "*:11445"; +allow_update = ["127.0.0.1", "::1"]; +# Number of processes to serve this storage (useful for read scaling) +count = 2; +# Backend ("sqlite" or "redis" - default "sqlite") +backend = "redis"; +# Hashes storage time (3 months) +expire = 90d; +# Synchronize updates to the storage each minute +sync = 1min; + diff --git a/data/conf/rspamd/override.d/worker-proxy.inc b/data/conf/rspamd/override.d/worker-proxy.inc index 0df926a7..92527f2b 100644 --- a/data/conf/rspamd/override.d/worker-proxy.inc +++ b/data/conf/rspamd/override.d/worker-proxy.inc @@ -1,6 +1,6 @@ bind_socket = "rspamd:9900"; milter = true; -upstream { +upstream "local" { name = "localhost"; default = true; hosts = "rspamd:11333" diff --git a/data/conf/rspamd/plugins.d/README.md b/data/conf/rspamd/plugins.d/README.md new file mode 100644 index 00000000..1516cf2d --- /dev/null +++ b/data/conf/rspamd/plugins.d/README.md @@ -0,0 +1 @@ +This is where you should copy any rspamd custom module diff --git a/data/conf/rspamd/rspamd.conf.local b/data/conf/rspamd/rspamd.conf.local new file mode 100644 index 00000000..9f2f8f1d --- /dev/null +++ b/data/conf/rspamd/rspamd.conf.local @@ -0,0 +1 @@ +# rspamd.conf.local diff --git a/data/conf/rspamd/rspamd.conf.override b/data/conf/rspamd/rspamd.conf.override new file mode 100644 index 00000000..d033e8e2 --- /dev/null +++ b/data/conf/rspamd/rspamd.conf.override @@ -0,0 +1,2 @@ +# rspamd.conf.override + diff --git a/data/conf/sogo/sogo.conf b/data/conf/sogo/sogo.conf index aa1a86ec..f9e9e077 100644 --- a/data/conf/sogo/sogo.conf +++ b/data/conf/sogo/sogo.conf @@ -26,7 +26,6 @@ // (domain3.tld, domain2.tld) // ); - SOGoIMAPServer = "imap://dovecot:143/?tls=YES"; SOGoSieveServer = "sieve://dovecot:4190/?tls=YES"; SOGoSMTPServer = "postfix:588"; WOPort = "0.0.0.0:20000"; diff --git a/data/conf/unbound/unbound.conf b/data/conf/unbound/unbound.conf index e2e3e9eb..f67729b3 100644 --- a/data/conf/unbound/unbound.conf +++ b/data/conf/unbound/unbound.conf @@ -32,6 +32,7 @@ server: hide-version: yes max-udp-size: 4096 msg-buffer-size: 65552 + unwanted-reply-threshold: 10000 remote-control: control-enable: yes diff --git a/data/web/admin.php b/data/web/admin.php index 6ca89e97..5c79aab4 100644 --- a/data/web/admin.php +++ b/data/web/admin.php @@ -5,6 +5,9 @@ if (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == "admi 
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/header.inc.php'; $_SESSION['return_to'] = $_SERVER['REQUEST_URI']; $tfa_data = get_tfa(); +if (!isset($_SESSION['gal']) && $license_cache = $redis->Get('LICENSE_STATUS_CACHE')) { + $_SESSION['gal'] = json_decode($license_cache, true); +} ?>
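The admin.php hunk just above reads the Redis key LICENSE_STATUS_CACHE once and keeps the decoded JSON in $_SESSION['gal'], so later requests in the same session skip the lookup. To see the raw value that ends up in the session, the key can be read straight from the Redis container; a minimal sketch, assuming the stock redis-mailcow container name and that the cache has already been written:

# Dump the cached status JSON that admin.php loads into $_SESSION['gal'].
docker exec -it $(docker ps -qf name=redis-mailcow) redis-cli GET LICENSE_STATUS_CACHE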
@@ -76,8 +79,40 @@ $tfa_data = get_tfa();
- - API (experimental, work in progress) + + + + +
+
+
+ +
+
+ + " aria-hidden="true"> + + +
+

+ : - + : +

+
+
+
+
+

+
+ +
+
+
+
+
+ + + API
+

@@ -113,6 +149,7 @@ $tfa_data = get_tfa();
+
@@ -252,7 +289,7 @@ $tfa_data = get_tfa();
- + ' required>
@@ -266,6 +303,16 @@ $tfa_data = get_tfa();
+ +
+ +

@@ -326,7 +373,7 @@ $tfa_data = get_tfa(); else { ?>
-
+

:

@@ -600,13 +647,14 @@ $tfa_data = get_tfa();

+
+ -

- -

+
-
+
-
+
+
+
+ + +
+
- +
- +
@@ -699,13 +753,13 @@ $tfa_data = get_tfa();
- +
- +
@@ -746,6 +800,7 @@ $tfa_data = get_tfa();
+
@@ -796,11 +851,11 @@ $tfa_data = get_tfa();
- +
- +
+
+ +
+ +
+
@@ -379,7 +385,6 @@ if (isset($_SESSION['mailcow_cc_role'])) {
-
@@ -401,7 +406,6 @@ if (isset($_SESSION['mailcow_cc_role'])) {
-
@@ -502,6 +506,7 @@ if (isset($_SESSION['mailcow_cc_role'])) { $mailbox = html_entity_decode(rawurldecode($_GET["mailbox"])); $result = mailbox('get', 'mailbox_details', $mailbox); $rl = ratelimit('get', 'mailbox', $mailbox); + $quarantine_notification = mailbox('get', 'quarantine_notification', $mailbox); if (!empty($result)) { ?>
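The hunk above additionally loads the mailbox's quarantine notification setting so the mailbox edit form can preselect it. To check what is actually stored for a given mailbox, one option is to query MySQL directly; a rough sketch, assuming the stock mysql-mailcow container, credentials sourced from mailcow.conf, and that the setting lives in the mailbox table's attributes column (user@example.org is a placeholder):

# Read the stored attributes for one mailbox; quarantine_notification is assumed
# to be part of the attributes JSON. DBUSER/DBPASS/DBNAME come from mailcow.conf.
source mailcow.conf
docker exec -it $(docker ps -qf name=mysql-mailcow) \
  mysql -u"${DBUSER}" -p"${DBPASS}" "${DBNAME}" \
  -e "SELECT username, attributes FROM mailbox WHERE username='user@example.org';"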

@@ -511,21 +516,22 @@ if (isset($_SESSION['mailcow_cc_role'])) {
- +
-
- +
+ +
+
+
+ +
+
+ + + + +
+ +

@@ -590,6 +638,13 @@ if (isset($_SESSION['mailcow_cc_role'])) {
+
+ +
+ + +
+
@@ -639,6 +694,7 @@ if (isset($_SESSION['mailcow_cc_role'])) {
+

@@ -681,6 +737,7 @@ if (isset($_SESSION['mailcow_cc_role'])) {
+

@@ -784,7 +841,7 @@ if (isset($_SESSION['mailcow_cc_role'])) {
- +
@@ -1027,7 +1084,7 @@ if (isset($_SESSION['mailcow_cc_role'])) {
- +
+ +
@@ -1162,13 +1220,13 @@ if (isset($_SESSION['mailcow_cc_role'])) {
- +
- +
",a.querySelectorAll("[msallowcapture^='']").length&&q.push("[*^$]="+L+"*(?:''|\"\")"),a.querySelectorAll("[selected]").length||q.push("\\["+L+"*(?:value|"+K+")"),a.querySelectorAll("[id~="+u+"-]").length||q.push("~="),a.querySelectorAll(":checked").length||q.push(":checked"),a.querySelectorAll("a#"+u+"+*").length||q.push(".#.+[+~]")}),ia(function(a){var b=n.createElement("input");b.setAttribute("type","hidden"),a.appendChild(b).setAttribute("name","D"),a.querySelectorAll("[name=d]").length&&q.push("name"+L+"*[*^$|!~]?="),a.querySelectorAll(":enabled").length||q.push(":enabled",":disabled"),a.querySelectorAll("*,:x"),q.push(",.*:")})),(c.matchesSelector=Z.test(s=o.matches||o.webkitMatchesSelector||o.mozMatchesSelector||o.oMatchesSelector||o.msMatchesSelector))&&ia(function(a){c.disconnectedMatch=s.call(a,"div"),s.call(a,"[s!='']:x"),r.push("!=",O)}),q=q.length&&new RegExp(q.join("|")),r=r.length&&new RegExp(r.join("|")),b=Z.test(o.compareDocumentPosition),t=b||Z.test(o.contains)?function(a,b){var c=9===a.nodeType?a.documentElement:a,d=b&&b.parentNode;return a===d||!(!d||1!==d.nodeType||!(c.contains?c.contains(d):a.compareDocumentPosition&&16&a.compareDocumentPosition(d)))}:function(a,b){if(b)while(b=b.parentNode)if(b===a)return!0;return!1},B=b?function(a,b){if(a===b)return l=!0,0;var d=!a.compareDocumentPosition-!b.compareDocumentPosition;return d?d:(d=(a.ownerDocument||a)===(b.ownerDocument||b)?a.compareDocumentPosition(b):1,1&d||!c.sortDetached&&b.compareDocumentPosition(a)===d?a===n||a.ownerDocument===v&&t(v,a)?-1:b===n||b.ownerDocument===v&&t(v,b)?1:k?J(k,a)-J(k,b):0:4&d?-1:1)}:function(a,b){if(a===b)return l=!0,0;var c,d=0,e=a.parentNode,f=b.parentNode,g=[a],h=[b];if(!e||!f)return a===n?-1:b===n?1:e?-1:f?1:k?J(k,a)-J(k,b):0;if(e===f)return ka(a,b);c=a;while(c=c.parentNode)g.unshift(c);c=b;while(c=c.parentNode)h.unshift(c);while(g[d]===h[d])d++;return d?ka(g[d],h[d]):g[d]===v?-1:h[d]===v?1:0},n):n},fa.matches=function(a,b){return fa(a,null,null,b)},fa.matchesSelector=function(a,b){if((a.ownerDocument||a)!==n&&m(a),b=b.replace(T,"='$1']"),c.matchesSelector&&p&&!A[b+" "]&&(!r||!r.test(b))&&(!q||!q.test(b)))try{var d=s.call(a,b);if(d||c.disconnectedMatch||a.document&&11!==a.document.nodeType)return d}catch(e){}return fa(b,n,null,[a]).length>0},fa.contains=function(a,b){return(a.ownerDocument||a)!==n&&m(a),t(a,b)},fa.attr=function(a,b){(a.ownerDocument||a)!==n&&m(a);var e=d.attrHandle[b.toLowerCase()],f=e&&D.call(d.attrHandle,b.toLowerCase())?e(a,b,!p):void 0;return void 0!==f?f:c.attributes||!p?a.getAttribute(b):(f=a.getAttributeNode(b))&&f.specified?f.value:null},fa.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)},fa.uniqueSort=function(a){var b,d=[],e=0,f=0;if(l=!c.detectDuplicates,k=!c.sortStable&&a.slice(0),a.sort(B),l){while(b=a[f++])b===a[f]&&(e=d.push(f));while(e--)a.splice(d[e],1)}return k=null,a},e=fa.getText=function(a){var b,c="",d=0,f=a.nodeType;if(f){if(1===f||9===f||11===f){if("string"==typeof a.textContent)return a.textContent;for(a=a.firstChild;a;a=a.nextSibling)c+=e(a)}else if(3===f||4===f)return a.nodeValue}else while(b=a[d++])c+=e(b);return c},d=fa.selectors={cacheLength:50,createPseudo:ha,match:W,attrHandle:{},find:{},relative:{">":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(a){return a[1]=a[1].replace(ba,ca),a[3]=(a[3]||a[4]||a[5]||"").replace(ba,ca),"~="===a[2]&&(a[3]=" "+a[3]+" "),a.slice(0,4)},CHILD:function(a){return 
a[1]=a[1].toLowerCase(),"nth"===a[1].slice(0,3)?(a[3]||fa.error(a[0]),a[4]=+(a[4]?a[5]+(a[6]||1):2*("even"===a[3]||"odd"===a[3])),a[5]=+(a[7]+a[8]||"odd"===a[3])):a[3]&&fa.error(a[0]),a},PSEUDO:function(a){var b,c=!a[6]&&a[2];return W.CHILD.test(a[0])?null:(a[3]?a[2]=a[4]||a[5]||"":c&&U.test(c)&&(b=g(c,!0))&&(b=c.indexOf(")",c.length-b)-c.length)&&(a[0]=a[0].slice(0,b),a[2]=c.slice(0,b)),a.slice(0,3))}},filter:{TAG:function(a){var b=a.replace(ba,ca).toLowerCase();return"*"===a?function(){return!0}:function(a){return a.nodeName&&a.nodeName.toLowerCase()===b}},CLASS:function(a){var b=y[a+" "];return b||(b=new RegExp("(^|"+L+")"+a+"("+L+"|$)"))&&y(a,function(a){return b.test("string"==typeof a.className&&a.className||"undefined"!=typeof a.getAttribute&&a.getAttribute("class")||"")})},ATTR:function(a,b,c){return function(d){var e=fa.attr(d,a);return null==e?"!="===b:b?(e+="","="===b?e===c:"!="===b?e!==c:"^="===b?c&&0===e.indexOf(c):"*="===b?c&&e.indexOf(c)>-1:"$="===b?c&&e.slice(-c.length)===c:"~="===b?(" "+e.replace(P," ")+" ").indexOf(c)>-1:"|="===b?e===c||e.slice(0,c.length+1)===c+"-":!1):!0}},CHILD:function(a,b,c,d,e){var f="nth"!==a.slice(0,3),g="last"!==a.slice(-4),h="of-type"===b;return 1===d&&0===e?function(a){return!!a.parentNode}:function(b,c,i){var j,k,l,m,n,o,p=f!==g?"nextSibling":"previousSibling",q=b.parentNode,r=h&&b.nodeName.toLowerCase(),s=!i&&!h,t=!1;if(q){if(f){while(p){m=b;while(m=m[p])if(h?m.nodeName.toLowerCase()===r:1===m.nodeType)return!1;o=p="only"===a&&!o&&"nextSibling"}return!0}if(o=[g?q.firstChild:q.lastChild],g&&s){m=q,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n&&j[2],m=n&&q.childNodes[n];while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if(1===m.nodeType&&++t&&m===b){k[a]=[w,n,t];break}}else if(s&&(m=b,l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),j=k[a]||[],n=j[0]===w&&j[1],t=n),t===!1)while(m=++n&&m&&m[p]||(t=n=0)||o.pop())if((h?m.nodeName.toLowerCase()===r:1===m.nodeType)&&++t&&(s&&(l=m[u]||(m[u]={}),k=l[m.uniqueID]||(l[m.uniqueID]={}),k[a]=[w,t]),m===b))break;return t-=e,t===d||t%d===0&&t/d>=0}}},PSEUDO:function(a,b){var c,e=d.pseudos[a]||d.setFilters[a.toLowerCase()]||fa.error("unsupported pseudo: "+a);return e[u]?e(b):e.length>1?(c=[a,a,"",b],d.setFilters.hasOwnProperty(a.toLowerCase())?ha(function(a,c){var d,f=e(a,b),g=f.length;while(g--)d=J(a,f[g]),a[d]=!(c[d]=f[g])}):function(a){return e(a,0,c)}):e}},pseudos:{not:ha(function(a){var b=[],c=[],d=h(a.replace(Q,"$1"));return d[u]?ha(function(a,b,c,e){var f,g=d(a,null,e,[]),h=a.length;while(h--)(f=g[h])&&(a[h]=!(b[h]=f))}):function(a,e,f){return b[0]=a,d(b,null,f,c),b[0]=null,!c.pop()}}),has:ha(function(a){return function(b){return fa(a,b).length>0}}),contains:ha(function(a){return a=a.replace(ba,ca),function(b){return(b.textContent||b.innerText||e(b)).indexOf(a)>-1}}),lang:ha(function(a){return V.test(a||"")||fa.error("unsupported lang: "+a),a=a.replace(ba,ca).toLowerCase(),function(b){var c;do if(c=p?b.lang:b.getAttribute("xml:lang")||b.getAttribute("lang"))return c=c.toLowerCase(),c===a||0===c.indexOf(a+"-");while((b=b.parentNode)&&1===b.nodeType);return!1}}),target:function(b){var c=a.location&&a.location.hash;return c&&c.slice(1)===b.id},root:function(a){return a===o},focus:function(a){return a===n.activeElement&&(!n.hasFocus||n.hasFocus())&&!!(a.type||a.href||~a.tabIndex)},enabled:function(a){return a.disabled===!1},disabled:function(a){return a.disabled===!0},checked:function(a){var 
b=a.nodeName.toLowerCase();return"input"===b&&!!a.checked||"option"===b&&!!a.selected},selected:function(a){return a.parentNode&&a.parentNode.selectedIndex,a.selected===!0},empty:function(a){for(a=a.firstChild;a;a=a.nextSibling)if(a.nodeType<6)return!1;return!0},parent:function(a){return!d.pseudos.empty(a)},header:function(a){return Y.test(a.nodeName)},input:function(a){return X.test(a.nodeName)},button:function(a){var b=a.nodeName.toLowerCase();return"input"===b&&"button"===a.type||"button"===b},text:function(a){var b;return"input"===a.nodeName.toLowerCase()&&"text"===a.type&&(null==(b=a.getAttribute("type"))||"text"===b.toLowerCase())},first:na(function(){return[0]}),last:na(function(a,b){return[b-1]}),eq:na(function(a,b,c){return[0>c?c+b:c]}),even:na(function(a,b){for(var c=0;b>c;c+=2)a.push(c);return a}),odd:na(function(a,b){for(var c=1;b>c;c+=2)a.push(c);return a}),lt:na(function(a,b,c){for(var d=0>c?c+b:c;--d>=0;)a.push(d);return a}),gt:na(function(a,b,c){for(var d=0>c?c+b:c;++db;b++)d+=a[b].value;return d}function ra(a,b,c){var d=b.dir,e=c&&"parentNode"===d,f=x++;return b.first?function(b,c,f){while(b=b[d])if(1===b.nodeType||e)return a(b,c,f)}:function(b,c,g){var h,i,j,k=[w,f];if(g){while(b=b[d])if((1===b.nodeType||e)&&a(b,c,g))return!0}else while(b=b[d])if(1===b.nodeType||e){if(j=b[u]||(b[u]={}),i=j[b.uniqueID]||(j[b.uniqueID]={}),(h=i[d])&&h[0]===w&&h[1]===f)return k[2]=h[2];if(i[d]=k,k[2]=a(b,c,g))return!0}}}function sa(a){return a.length>1?function(b,c,d){var e=a.length;while(e--)if(!a[e](b,c,d))return!1;return!0}:a[0]}function ta(a,b,c){for(var d=0,e=b.length;e>d;d++)fa(a,b[d],c);return c}function ua(a,b,c,d,e){for(var f,g=[],h=0,i=a.length,j=null!=b;i>h;h++)(f=a[h])&&(c&&!c(f,d,e)||(g.push(f),j&&b.push(h)));return g}function va(a,b,c,d,e,f){return d&&!d[u]&&(d=va(d)),e&&!e[u]&&(e=va(e,f)),ha(function(f,g,h,i){var j,k,l,m=[],n=[],o=g.length,p=f||ta(b||"*",h.nodeType?[h]:h,[]),q=!a||!f&&b?p:ua(p,m,a,h,i),r=c?e||(f?a:o||d)?[]:g:q;if(c&&c(q,r,h,i),d){j=ua(r,n),d(j,[],h,i),k=j.length;while(k--)(l=j[k])&&(r[n[k]]=!(q[n[k]]=l))}if(f){if(e||a){if(e){j=[],k=r.length;while(k--)(l=r[k])&&j.push(q[k]=l);e(null,r=[],j,i)}k=r.length;while(k--)(l=r[k])&&(j=e?J(f,l):m[k])>-1&&(f[j]=!(g[j]=l))}}else r=ua(r===g?r.splice(o,r.length):r),e?e(null,g,r,i):H.apply(g,r)})}function wa(a){for(var b,c,e,f=a.length,g=d.relative[a[0].type],h=g||d.relative[" "],i=g?1:0,k=ra(function(a){return a===b},h,!0),l=ra(function(a){return J(b,a)>-1},h,!0),m=[function(a,c,d){var e=!g&&(d||c!==j)||((b=c).nodeType?k(a,c,d):l(a,c,d));return b=null,e}];f>i;i++)if(c=d.relative[a[i].type])m=[ra(sa(m),c)];else{if(c=d.filter[a[i].type].apply(null,a[i].matches),c[u]){for(e=++i;f>e;e++)if(d.relative[a[e].type])break;return va(i>1&&sa(m),i>1&&qa(a.slice(0,i-1).concat({value:" "===a[i-2].type?"*":""})).replace(Q,"$1"),c,e>i&&wa(a.slice(i,e)),f>e&&wa(a=a.slice(e)),f>e&&qa(a))}m.push(c)}return sa(m)}function xa(a,b){var c=b.length>0,e=a.length>0,f=function(f,g,h,i,k){var l,o,q,r=0,s="0",t=f&&[],u=[],v=j,x=f||e&&d.find.TAG("*",k),y=w+=null==v?1:Math.random()||.1,z=x.length;for(k&&(j=g===n||g||k);s!==z&&null!=(l=x[s]);s++){if(e&&l){o=0,g||l.ownerDocument===n||(m(l),h=!p);while(q=a[o++])if(q(l,g||n,h)){i.push(l);break}k&&(w=y)}c&&((l=!q&&l)&&r--,f&&t.push(l))}if(r+=s,c&&s!==r){o=0;while(q=b[o++])q(t,u,g,h);if(f){if(r>0)while(s--)t[s]||u[s]||(u[s]=F.call(i));u=ua(u)}H.apply(i,u),k&&!f&&u.length>0&&r+b.length>1&&fa.uniqueSort(i)}return k&&(w=y,j=v),t};return c?ha(f):f}return h=fa.compile=function(a,b){var c,d=[],e=[],f=A[a+" 
"];if(!f){b||(b=g(a)),c=b.length;while(c--)f=wa(b[c]),f[u]?d.push(f):e.push(f);f=A(a,xa(e,d)),f.selector=a}return f},i=fa.select=function(a,b,e,f){var i,j,k,l,m,n="function"==typeof a&&a,o=!f&&g(a=n.selector||a);if(e=e||[],1===o.length){if(j=o[0]=o[0].slice(0),j.length>2&&"ID"===(k=j[0]).type&&c.getById&&9===b.nodeType&&p&&d.relative[j[1].type]){if(b=(d.find.ID(k.matches[0].replace(ba,ca),b)||[])[0],!b)return e;n&&(b=b.parentNode),a=a.slice(j.shift().value.length)}i=W.needsContext.test(a)?0:j.length;while(i--){if(k=j[i],d.relative[l=k.type])break;if((m=d.find[l])&&(f=m(k.matches[0].replace(ba,ca),_.test(j[0].type)&&oa(b.parentNode)||b))){if(j.splice(i,1),a=f.length&&qa(j),!a)return H.apply(e,f),e;break}}}return(n||h(a,o))(f,b,!p,e,!b||_.test(a)&&oa(b.parentNode)||b),e},c.sortStable=u.split("").sort(B).join("")===u,c.detectDuplicates=!!l,m(),c.sortDetached=ia(function(a){return 1&a.compareDocumentPosition(n.createElement("div"))}),ia(function(a){return a.innerHTML="","#"===a.firstChild.getAttribute("href")})||ja("type|href|height|width",function(a,b,c){return c?void 0:a.getAttribute(b,"type"===b.toLowerCase()?1:2)}),c.attributes&&ia(function(a){return a.innerHTML="",a.firstChild.setAttribute("value",""),""===a.firstChild.getAttribute("value")})||ja("value",function(a,b,c){return c||"input"!==a.nodeName.toLowerCase()?void 0:a.defaultValue}),ia(function(a){return null==a.getAttribute("disabled")})||ja(K,function(a,b,c){var d;return c?void 0:a[b]===!0?b.toLowerCase():(d=a.getAttributeNode(b))&&d.specified?d.value:null}),fa}(a);n.find=t,n.expr=t.selectors,n.expr[":"]=n.expr.pseudos,n.uniqueSort=n.unique=t.uniqueSort,n.text=t.getText,n.isXMLDoc=t.isXML,n.contains=t.contains;var u=function(a,b,c){var d=[],e=void 0!==c;while((a=a[b])&&9!==a.nodeType)if(1===a.nodeType){if(e&&n(a).is(c))break;d.push(a)}return d},v=function(a,b){for(var c=[];a;a=a.nextSibling)1===a.nodeType&&a!==b&&c.push(a);return c},w=n.expr.match.needsContext,x=/^<([\w-]+)\s*\/?>(?:<\/\1>|)$/,y=/^.[^:#\[\.,]*$/;function z(a,b,c){if(n.isFunction(b))return n.grep(a,function(a,d){return!!b.call(a,d,a)!==c});if(b.nodeType)return n.grep(a,function(a){return a===b!==c});if("string"==typeof b){if(y.test(b))return n.filter(b,a,c);b=n.filter(b,a)}return n.grep(a,function(a){return n.inArray(a,b)>-1!==c})}n.filter=function(a,b,c){var d=b[0];return c&&(a=":not("+a+")"),1===b.length&&1===d.nodeType?n.find.matchesSelector(d,a)?[d]:[]:n.find.matches(a,n.grep(b,function(a){return 1===a.nodeType}))},n.fn.extend({find:function(a){var b,c=[],d=this,e=d.length;if("string"!=typeof a)return this.pushStack(n(a).filter(function(){for(b=0;e>b;b++)if(n.contains(d[b],this))return!0}));for(b=0;e>b;b++)n.find(a,d[b],c);return c=this.pushStack(e>1?n.unique(c):c),c.selector=this.selector?this.selector+" "+a:a,c},filter:function(a){return this.pushStack(z(this,a||[],!1))},not:function(a){return this.pushStack(z(this,a||[],!0))},is:function(a){return!!z(this,"string"==typeof a&&w.test(a)?n(a):a||[],!1).length}});var A,B=/^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,C=n.fn.init=function(a,b,c){var e,f;if(!a)return this;if(c=c||A,"string"==typeof a){if(e="<"===a.charAt(0)&&">"===a.charAt(a.length-1)&&a.length>=3?[null,a,null]:B.exec(a),!e||!e[1]&&b)return!b||b.jquery?(b||c).find(a):this.constructor(b).find(a);if(e[1]){if(b=b instanceof n?b[0]:b,n.merge(this,n.parseHTML(e[1],b&&b.nodeType?b.ownerDocument||b:d,!0)),x.test(e[1])&&n.isPlainObject(b))for(e in b)n.isFunction(this[e])?this[e](b[e]):this.attr(e,b[e]);return 
this}if(f=d.getElementById(e[2]),f&&f.parentNode){if(f.id!==e[2])return A.find(a);this.length=1,this[0]=f}return this.context=d,this.selector=a,this}return a.nodeType?(this.context=this[0]=a,this.length=1,this):n.isFunction(a)?"undefined"!=typeof c.ready?c.ready(a):a(n):(void 0!==a.selector&&(this.selector=a.selector,this.context=a.context),n.makeArray(a,this))};C.prototype=n.fn,A=n(d);var D=/^(?:parents|prev(?:Until|All))/,E={children:!0,contents:!0,next:!0,prev:!0};n.fn.extend({has:function(a){var b,c=n(a,this),d=c.length;return this.filter(function(){for(b=0;d>b;b++)if(n.contains(this,c[b]))return!0})},closest:function(a,b){for(var c,d=0,e=this.length,f=[],g=w.test(a)||"string"!=typeof a?n(a,b||this.context):0;e>d;d++)for(c=this[d];c&&c!==b;c=c.parentNode)if(c.nodeType<11&&(g?g.index(c)>-1:1===c.nodeType&&n.find.matchesSelector(c,a))){f.push(c);break}return this.pushStack(f.length>1?n.uniqueSort(f):f)},index:function(a){return a?"string"==typeof a?n.inArray(this[0],n(a)):n.inArray(a.jquery?a[0]:a,this):this[0]&&this[0].parentNode?this.first().prevAll().length:-1},add:function(a,b){return this.pushStack(n.uniqueSort(n.merge(this.get(),n(a,b))))},addBack:function(a){return this.add(null==a?this.prevObject:this.prevObject.filter(a))}});function F(a,b){do a=a[b];while(a&&1!==a.nodeType);return a}n.each({parent:function(a){var b=a.parentNode;return b&&11!==b.nodeType?b:null},parents:function(a){return u(a,"parentNode")},parentsUntil:function(a,b,c){return u(a,"parentNode",c)},next:function(a){return F(a,"nextSibling")},prev:function(a){return F(a,"previousSibling")},nextAll:function(a){return u(a,"nextSibling")},prevAll:function(a){return u(a,"previousSibling")},nextUntil:function(a,b,c){return u(a,"nextSibling",c)},prevUntil:function(a,b,c){return u(a,"previousSibling",c)},siblings:function(a){return v((a.parentNode||{}).firstChild,a)},children:function(a){return v(a.firstChild)},contents:function(a){return n.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:n.merge([],a.childNodes)}},function(a,b){n.fn[a]=function(c,d){var e=n.map(this,b,c);return"Until"!==a.slice(-5)&&(d=c),d&&"string"==typeof d&&(e=n.filter(d,e)),this.length>1&&(E[a]||(e=n.uniqueSort(e)),D.test(a)&&(e=e.reverse())),this.pushStack(e)}});var G=/\S+/g;function H(a){var b={};return n.each(a.match(G)||[],function(a,c){b[c]=!0}),b}n.Callbacks=function(a){a="string"==typeof a?H(a):n.extend({},a);var b,c,d,e,f=[],g=[],h=-1,i=function(){for(e=a.once,d=b=!0;g.length;h=-1){c=g.shift();while(++h-1)f.splice(c,1),h>=c&&h--}),this},has:function(a){return a?n.inArray(a,f)>-1:f.length>0},empty:function(){return f&&(f=[]),this},disable:function(){return e=g=[],f=c="",this},disabled:function(){return!f},lock:function(){return e=!0,c||j.disable(),this},locked:function(){return!!e},fireWith:function(a,c){return e||(c=c||[],c=[a,c.slice?c.slice():c],g.push(c),b||i()),this},fire:function(){return j.fireWith(this,arguments),this},fired:function(){return!!d}};return j},n.extend({Deferred:function(a){var b=[["resolve","done",n.Callbacks("once memory"),"resolved"],["reject","fail",n.Callbacks("once memory"),"rejected"],["notify","progress",n.Callbacks("memory")]],c="pending",d={state:function(){return c},always:function(){return e.done(arguments).fail(arguments),this},then:function(){var a=arguments;return n.Deferred(function(c){n.each(b,function(b,f){var g=n.isFunction(a[b])&&a[b];e[f[1]](function(){var 
[Bundled minified jQuery 1.x asset (oldIE-compatible branch): this slice covers the Deferred/when, ready, data, queue, event, and DOM-manipulation modules. The extracted copy is mangled; markup-like sequences inside its string literals and regular expressions (for example the element wrapper map and the <tbody>/<script> patterns) were stripped during text extraction, so the minified source is omitted here.]