empresa-libre/source/app/controllers/utils.py

991 lines
28 KiB
Python
Raw Normal View History

2019-02-03 23:32:48 -06:00
#!/usr/bin/env python3
# ~ Empresa Libre
# ~ Copyright (C) 2016-2019 Mauricio Baeza Servin (public@elmau.net)
# ~
# ~ This program is free software: you can redistribute it and/or modify
# ~ it under the terms of the GNU General Public License as published by
# ~ the Free Software Foundation, either version 3 of the License, or
# ~ (at your option) any later version.
# ~
# ~ This program is distributed in the hope that it will be useful,
# ~ but WITHOUT ANY WARRANTY; without even the implied warranty of
# ~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# ~ GNU General Public License for more details.
# ~
# ~ You should have received a copy of the GNU General Public License
# ~ along with this program. If not, see <http://www.gnu.org/licenses/>.
import base64
import collections
import collections.abc
import csv
import datetime
import getpass
import hashlib
import io
import json
import logging
import math
import os
import shlex
import shutil
import smtplib
import sqlite3
import ssl
import subprocess
import threading
import unicodedata
import zipfile
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from io import BytesIO
from pathlib import Path
from xml.sax.saxutils import escape

import lxml.etree as ET
import requests
import segno
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from dateutil import parser

from settings import DEBUG, DB_COMPANIES, PATHS, TEMPLATE_CANCEL, RFCS

from .cfdi_xml import CFDI
from .pacs import PACComercioDigital
from .pacs import PACFinkok
from .pacs.cfdi_cert import SATCertificate
# ~ v2
from .pycfdi import CfdiRead
2020-01-07 00:32:48 -06:00
# Console log formatting: colorized level names, dd/mm/yyyy timestamps.
LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
LOG_DATE = '%d/%m/%Y %H:%M:%S'
logging.addLevelName(logging.ERROR, '\033[1;41mERROR\033[1;0m')
logging.addLevelName(logging.DEBUG, '\x1b[33mDEBUG\033[1;0m')
logging.addLevelName(logging.INFO, '\x1b[32mINFO\033[1;0m')
logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT, datefmt=LOG_DATE)
log = logging.getLogger(__name__)
logging.getLogger('peewee').setLevel(logging.WARNING)


# Seconds before SMTP connections give up.
TIMEOUT = 10
PATH_INVOICES = 'facturas'

# PostgreSQL command-line prefixes; in DEBUG mode the host is forced to
# localhost (presumably production connects over a local socket).
PG_DUMP = 'pg_dump -U postgres'
PSQL = 'psql -U postgres'
if DEBUG:
    PG_DUMP = 'pg_dump -h localhost -U postgres'
    PSQL = 'psql -h localhost -U postgres'


# Supported stamping providers (PAC), keyed by the name stored in auth['pac'].
PACS = {
    'finkok': PACFinkok,
    'comercio': PACComercioDigital,
}

# Namespaces for reading stamped CFDI 3.3 documents and their fiscal stamp.
NS_CFDI = {
    'cfdi': 'http://www.sat.gob.mx/cfd/3',
    'tdf': 'http://www.sat.gob.mx/TimbreFiscalDigital',
}
2019-02-05 22:12:19 -06:00
2019-02-15 14:38:41 -06:00
#~ https://github.com/kennethreitz/requests/blob/v1.2.3/requests/structures.py#L37
class CaseInsensitiveDict(collections.abc.MutableMapping):
    """A case-insensitive ``dict``-like object.

    Implements all methods and operations of ``MutableMapping`` as well
    as dict's ``copy``; also provides ``lower_items``. All keys are
    expected to be strings. The structure remembers the case of the last
    key to be set, and iteration yields those cased keys, but lookup and
    containment testing are case-insensitive:

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']              # True

    If the constructor, ``.update``, or equality comparison are given
    keys with equal ``.lower()``s, the behavior is undefined.

    Fix: base classes were ``collections.MutableMapping`` /
    ``collections.Mapping`` — those aliases were removed in Python 3.10;
    use ``collections.abc`` instead.
    """

    def __init__(self, data=None, **kwargs):
        self._store = dict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, collections.abc.Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
2019-02-05 22:12:19 -06:00
class SendMail(object):
    """SMTP sender that connects and authenticates on construction.

    Check ``is_connect`` after creating; on failure ``error`` holds a
    user-facing message. Call ``close()`` when done.
    """

    def __init__(self, config):
        # config keys used: server, port, ssl, starttls, user, pass
        self._config = config
        self._server = None
        self._error = ''
        self._is_connect = self._login()

    @property
    def is_connect(self):
        # True when connection and login succeeded.
        return self._is_connect

    @property
    def error(self):
        # Why the connection failed ('' when OK).
        return self._error

    def _login(self):
        """Open the SMTP connection per config and log in; True on success."""
        try:
            if self._config['ssl'] and self._config['starttls']:
                self._server = smtplib.SMTP(
                    self._config['server'],
                    self._config['port'], timeout=TIMEOUT)
                self._server.ehlo()
                self._server.starttls()
                self._server.ehlo()
            elif self._config['ssl']:
                self._server = smtplib.SMTP_SSL(
                    self._config['server'],
                    self._config['port'], timeout=TIMEOUT)
                self._server.ehlo()
            else:
                self._server = smtplib.SMTP(
                    self._config['server'],
                    self._config['port'], timeout=TIMEOUT)
            self._server.login(self._config['user'], self._config['pass'])
            return True
        except smtplib.SMTPAuthenticationError as e:
            if '535' in str(e):
                self._error = 'Nombre de usuario o contraseña inválidos'
                return False
            if '534' in str(e) and 'gmail' in self._config['server']:
                self._error = 'Necesitas activar el acceso a otras ' \
                    'aplicaciones en tu cuenta de GMail'
                return False
            # Fix: other authentication errors used to fall through and
            # return None with an empty error message.
            self._error = str(e)
            return False
        except smtplib.SMTPException as e:
            self._error = str(e)
            return False
        except Exception as e:
            self._error = str(e)
            return False

    def send(self, options):
        """Send one HTML message with optional attachments.

        options keys: to, subject, message; optional: copy, confirm,
        files (iterable of (content, filename) pairs).
        Returns '' on success or the error text.
        """
        try:
            message = MIMEMultipart()
            message['From'] = self._config['user']
            message['To'] = options['to']
            message['CC'] = options.get('copy', '')
            message['Subject'] = options['subject']
            message['Date'] = formatdate(localtime=True)
            if options.get('confirm', False):
                message['Disposition-Notification-To'] = message['From']
            message.attach(MIMEText(options['message'], 'html'))
            for f in options.get('files', ()):
                part = MIMEBase('application', 'octet-stream')
                if isinstance(f[0], str):
                    part.set_payload(f[0].encode('utf-8'))
                else:
                    part.set_payload(f[0])
                encoders.encode_base64(part)
                part.add_header(
                    'Content-Disposition',
                    "attachment; filename={}".format(f[1]))
                message.attach(part)
            receivers = options['to'].split(',') + message['CC'].split(',')
            self._server.sendmail(
                self._config['user'], receivers, message.as_string())
            return ''
        except Exception as e:
            return str(e)

    def close(self):
        """Quit the SMTP session, ignoring any error while quitting."""
        try:
            self._server.quit()
        # Fix: bare `except:` also trapped SystemExit/KeyboardInterrupt.
        except Exception:
            pass
        return
2019-02-15 14:38:41 -06:00
class CfdiToDict(object):
    """Extract complemento data (Divisas, LeyendasFiscales, CartaPorte 2.0)
    from a stamped CFDI XML string; results are exposed via ``values``.
    """

    # Core CFDI namespace depends on the document version; it is written
    # into NS at parse time by _get_values().
    NS_VERSION = {
        'cfdi3.3': 'http://www.sat.gob.mx/cfd/3',
        'cfdi4.0': 'http://www.sat.gob.mx/cfd/4',
    }
    NS = {
        'divisas': 'http://www.sat.gob.mx/divisas',
        'leyendasFisc': 'http://www.sat.gob.mx/leyendasFiscales',
        'cartaporte20': 'http://www.sat.gob.mx/CartaPorte20',
    }
    # SAT catalog c_FiguraTransporte: code -> display label
    tipo_figura = {
        '01': '[01] Operador',
        '02': '[02] Propietario',
        '03': '[03] Arrendador',
        '04': '[04] Notificado',
    }
    # Display names for country/state codes used in CartaPorte addresses.
    PAISES = {
        'MEX': 'México',
    }
    ESTADOS = {
        'AGU': 'Aguascalientes',
        'BCN': 'Baja California',
        'BCS': 'Baja California Sur',
        'CAM': 'Campeche',
        'CHP': 'Chiapas',
        'CHH': 'Chihuahua',
        'COA': 'Coahuila',
        'COL': 'Colima',
        'DIF': 'Ciudad de México',
        'DUR': 'Durango',
        'GUA': 'Guanajuato',
        'GRO': 'Guerrero',
        'HID': 'Hidalgo',
        'JAL': 'Jalisco',
        'MEX': 'México',
        'MIC': 'Michoacán',
        'MOR': 'Morelos',
        'NAC': 'Nacional',
        'NAY': 'Nayarit',
        'NLE': 'Nuevo León',
        'OAX': 'Oaxaca',
        'PUE': 'Puebla',
        'QUE': 'Querétaro',
        'ROO': 'Quintana Roo',
        'SLP': 'San Luis Potosí',
        'SIN': 'Sinaloa',
        'SON': 'Sonora',
        'TAB': 'Tabasco',
        'TAM': 'Tamaulipas',
        'TLA': 'Tlaxcala',
        'VER': 'Veracruz',
        'YUC': 'Yucatán',
        'ZAC': 'Zacatecas',
    }

    def __init__(self, xml):
        # xml: the CFDI document as a str
        self._values = {
            'leyendas': (),
        }
        self._root = ET.parse(BytesIO(xml.encode())).getroot()
        self._get_values()

    @property
    def values(self):
        # Dict with keys such as 'divisas', 'leyendas', 'carta_porte'
        # when the corresponding complemento is present.
        return self._values

    def _get_values(self):
        # Resolve the cfdi namespace from the document's Version attribute.
        # NOTE(review): this mutates the class-level NS dict, which is
        # shared across instances — confirm this is intentional.
        version = self._root.attrib['Version']
        ns = f'cfdi{version}'
        self.NS['cfdi'] = self.NS_VERSION[ns]
        self._complementos()
        return

    def _set_carta_porte_domicilio(self, data):
        # Format an address node as "Municipio, Estado, País, C.P. XXXXX".
        municipio = data['Municipio']
        estado = self.ESTADOS[data['Estado']]
        pais = self.PAISES[data['Pais']]
        domicilio = f"{municipio}, {estado}, {pais}, C.P. {data['CodigoPostal']}"
        return domicilio

    def _complementos(self):
        """Walk cfdi:Complemento collecting Divisas, Leyendas and CartaPorte."""
        path = '//cfdi:Complemento'
        complemento = self._root.xpath(path, namespaces=self.NS)[0]

        path = '//divisas:Divisas'
        divisas = complemento.xpath(path, namespaces=self.NS)
        if divisas:
            d = CaseInsensitiveDict(divisas[0].attrib)
            d.pop('version', '')
            self._values.update({'divisas': d})

        path = '//leyendasFisc:Leyenda'
        node = complemento.xpath(path, namespaces=self.NS)
        if node:
            leyendas = [CaseInsensitiveDict(n.attrib) for n in node]
            self._values['leyendas'] = leyendas

        path = '//cartaporte20:CartaPorte'
        carta_porte = complemento.xpath(path, namespaces=self.NS)
        if carta_porte:
            values = CaseInsensitiveDict(carta_porte[0].attrib)
            for node in carta_porte[0]:
                if 'FiguraTransporte' in node.tag:
                    # Only the first TiposFigura child is read; the code
                    # label is resolved from the tipo_figura catalog.
                    figuras = CaseInsensitiveDict(node[0].attrib)
                    figuras['TipoFigura'] = self.tipo_figura[figuras['TipoFigura']]
                    values['figuras'] = figuras
                elif 'Mercancias' in node.tag:
                    mercancias = CaseInsensitiveDict(node.attrib)
                    detalle = [CaseInsensitiveDict(n.attrib)
                        for n in node if 'Mercancia' in n.tag]
                    values['mercancias'] = {
                        'mercancias': mercancias,
                        'detalle': detalle,
                    }
                    path = '//cartaporte20:Autotransporte'
                    node_auto = node.xpath(path, namespaces=self.NS)[0]
                    values_auto = CaseInsensitiveDict(node_auto.attrib)
                    values['autotransporte'] = values_auto
                    path = '//cartaporte20:IdentificacionVehicular'
                    node_tmp = node_auto.xpath(path, namespaces=self.NS)[0]
                    values_auto = CaseInsensitiveDict(node_tmp.attrib)
                    values['autotransporte'].update(values_auto)
                    path = '//cartaporte20:Seguros'
                    node_tmp = node_auto.xpath(path, namespaces=self.NS)[0]
                    values_auto = CaseInsensitiveDict(node_tmp.attrib)
                    values['autotransporte'].update(values_auto)
                    # Remolques is optional — ignore when absent.
                    path = '//cartaporte20:Remolques'
                    try:
                        node_tmp = node_auto.xpath(path, namespaces=self.NS)[0][0]
                        values_auto = CaseInsensitiveDict(node_tmp.attrib)
                        values['autotransporte'].update(values_auto)
                    except IndexError:
                        pass
                elif 'Ubicaciones' in node.tag:
                    ubicaciones = []
                    for n in node:
                        ubicacion = CaseInsensitiveDict(n.attrib)
                        # First child is assumed to be the Domicilio node
                        ubicacion['domicilio'] = self._set_carta_porte_domicilio(
                            CaseInsensitiveDict(n[0].attrib))
                        ubicaciones.append(ubicacion)
                    values['ubicaciones'] = ubicaciones
            self._values['carta_porte'] = values

        return
2020-01-07 00:32:48 -06:00
def _call(args):
return subprocess.check_output(args, shell=True).decode()
2020-01-11 22:14:38 -06:00
def _run(args, wait=False):
result = ''
cmd = shlex.split(args)
if wait:
result = subprocess.run(cmd, shell=True, check=True).stdout.decode()
else:
subprocess.run(cmd)
return result
2020-01-07 00:32:48 -06:00
def _join(*paths):
return os.path.join(*paths)
def run_in_thread(fn):
    """Decorator: run *fn* in a daemonless background thread.

    The wrapper starts the thread and returns it so callers may join().
    """
    def wrapper(*args, **kwargs):
        thread = threading.Thread(target=fn, args=args, kwargs=kwargs)
        thread.start()
        return thread
    return wrapper
2019-02-05 22:12:19 -06:00
def send_mail(data):
    """Open an SMTP session with data['server'] and deliver data['mail'].

    Returns {'ok': bool, 'msg': str}; msg is the send or login error.
    """
    ok = True
    server = SendMail(data['server'])
    if server.is_connect:
        msg = server.send(data['mail'])
    else:
        ok = False
        msg = server.error
    server.close()
    return {'ok': ok, 'msg': msg}
2019-02-03 23:32:48 -06:00
def round_up(value):
    """Smallest integer greater than or equal to *value*."""
    return math.ceil(value)
def _get_key(password):
2019-02-04 22:13:11 -06:00
digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(password.encode())
key = base64.urlsafe_b64encode(digest.finalize())
2019-02-03 23:32:48 -06:00
return key
2019-02-04 22:13:11 -06:00
def encrypt(data, password):
    """Symmetric-encrypt *data* (str) with a password-derived Fernet key."""
    fernet = Fernet(_get_key(password))
    return fernet.encrypt(data.encode()).decode()
2019-02-04 22:13:11 -06:00
def decrypt(data, password):
    """Inverse of encrypt(): decrypt a Fernet token made with *password*."""
    fernet = Fernet(_get_key(password))
    return fernet.decrypt(data.encode()).decode()
2019-02-05 22:12:19 -06:00
def to_bool(value):
    """Interpret '0'/0 as False and any other int-convertible value by truth."""
    return int(value) != 0
def get_url(url):
    """GET *url* and return the response body as text."""
    return requests.get(url).text
2019-02-15 14:38:41 -06:00
2019-02-15 17:51:13 -06:00
def parse_date(value, next_day=False):
    """Parse a date string; optionally shift the result one day forward."""
    result = parser.parse(value)
    if next_day:
        result += datetime.timedelta(days=1)
    return result
2019-02-17 13:29:10 -06:00
2019-02-17 22:12:01 -06:00
def to_zip(files):
    """Pack a {name: content} mapping into an in-memory ZIP; return the bytes."""
    buffer = BytesIO()
    with zipfile.ZipFile(buffer, 'a', zipfile.ZIP_DEFLATED, False) as archive:
        for name, content in files.items():
            archive.writestr(name, content)
    return buffer.getvalue()
2020-01-07 00:32:48 -06:00
2020-01-11 22:14:38 -06:00
def dumps(data):
    """Serialize to JSON; non-serializable values (dates, ...) become str()."""
    return json.dumps(data, default=str)
2020-01-07 00:32:48 -06:00
2020-01-11 22:14:38 -06:00
def loads(data):
    """Parse a JSON string into Python objects."""
    return json.loads(data)
2020-01-12 22:40:45 -06:00
def json_loads(path):
    """Read the file at *path* and parse it as JSON.

    Fix: the file handle is now closed (the original leaked it).
    """
    with open(path, 'r') as f:
        return json.loads(f.read())
2020-01-11 22:14:38 -06:00
def _validate_db_rfc():
    """Make sure the companies registry has its `names` table."""
    sql = """
        CREATE TABLE IF NOT EXISTS names(
            rfc TEXT NOT NULL COLLATE NOCASE UNIQUE,
            con TEXT NOT NULL
        );
    """
    connection = sqlite3.connect(DB_COMPANIES)
    cursor = connection.cursor()
    cursor.executescript(sql)
    cursor.close()
    connection.close()
    return
2020-01-11 22:14:38 -06:00
def _sql_companies(sql, args=()):
    """Run *sql* against the companies registry.

    Returns the fetched rows, or False on an integrity error.
    Fix: the connection/cursor are now always closed (the original
    leaked both when an IntegrityError fired).
    """
    _validate_db_rfc()
    con = sqlite3.connect(DB_COMPANIES)
    cursor = con.cursor()
    try:
        cursor.execute(sql, args)
        data = cursor.fetchall()
        con.commit()
    except sqlite3.IntegrityError as e:
        log.error(e)
        return False
    finally:
        cursor.close()
        con.close()
    return data
2020-01-21 23:25:13 -06:00
def get_data_con(rfc):
    """Return the stored connection info for *rfc* as a dict."""
    row = rfc_get(rfc)[0]
    return loads(row[0])
def rfc_get(rfc=''):
    """Fetch one company's `con` column by RFC, or every row when empty."""
    if rfc:
        sql = "SELECT con FROM names WHERE rfc = ?"
        args = (rfc,)
    else:
        sql = "SELECT * FROM names"
        args = ()
    return _sql_companies(sql, args)
def rfc_exists(rfc):
    """True/False when the lookup ran; None when the query itself failed."""
    sql = "SELECT rfc FROM names WHERE rfc = ?"
    rows = _sql_companies(sql, (rfc,))
    if isinstance(rows, bool):
        return
    return bool(rows)
def rfc_add(rfc, con):
    """Insert a company record (RFC uppercased, con stored as JSON).

    Always returns True, even when the insert hit an integrity error.
    """
    sql = "INSERT INTO names VALUES (?, ?)"
    _sql_companies(sql, (rfc.upper(), dumps(con)))
    return True
def db_create(user):
    """Create a PostgreSQL role and database named *user* (password = user).

    NOTE(review): *user* is interpolated into a shell command line — this
    assumes RFC values are validated upstream.
    """
    commands = (
        f'{PSQL} -c "CREATE ROLE {user} WITH LOGIN ENCRYPTED PASSWORD \'{user}\';"',
        f'{PSQL} -c "CREATE DATABASE {user} WITH OWNER {user};"',
    )
    for cmd in commands:
        _run(cmd)
    return True
def db_delete(user, path, no_database=False):
    """Remove a company: registry row, then (unless no_database) a timestamped
    backup followed by dropping its database and role."""
    _sql_companies("DELETE FROM names WHERE rfc = ?", (user,))
    if no_database:
        return True
    user = user.replace('&', '').lower()
    dt = now().strftime('%y%m%d_%H%M')
    path_bk = _join(path, f'{user}_{dt}.bk')
    # Dump before dropping, so the data can be restored if needed.
    _run(f'{PG_DUMP} -d {user} -Fc -f "{path_bk}"')
    _run(f'{PSQL} -c "DROP DATABASE {user};"')
    _run(f'{PSQL} -c "DROP ROLE {user};"')
    return True
2020-01-07 00:32:48 -06:00
def _get_pass(rfc):
return rfc
2020-01-21 23:25:13 -06:00
def _backup_db(rfc, is_mv, url_seafile):
    """Dump one company's database into PATHS['BK'].

    When is_mv is set, also copy the dump into the shared local folder.
    (url_seafile is currently unused — kept for interface stability.)
    """
    log.info(f'Generando backup de: {rfc.upper()}')
    bk_name = f'{rfc}.bk'
    path = _join(PATHS['BK'], bk_name)
    _run(f'{PG_DUMP} -d {rfc} -Fc -f "{path}"')
    log.info('\tBackup local generado...')

    if not is_mv:
        return
    path_target = _validate_path_local()
    if not path_target:
        log.error('\tNo existe la carpeta compartida...')
        return
    shutil.copy(path, _join(path_target, bk_name))
    return
2020-01-21 23:25:13 -06:00
def db_backup(is_mv, url_seafile):
    """Back up every registered company database."""
    rows = rfc_get()
    if not len(rows):
        log.info('Sin bases de datos a respaldar')
        return
    for rfc, _ in rows:
        _backup_db(rfc.lower(), is_mv, url_seafile)
    return
2020-01-11 22:14:38 -06:00
def _validate_path_local():
    """Return the shared local backup folder, or '' when it does not exist."""
    candidate = _join(str(Path.home()), PATHS['LOCAL'])
    return candidate if os.path.isdir(candidate) else ''
def db_backup_local():
    """Dump every registered database into the local shared folder.

    Returns {'ok': bool, 'msg': str}.
    Fix: dropped an unused local (the DB name parsed from the registry
    row was never used).
    """
    path_bk = _validate_path_local()
    if not path_bk:
        return {'ok': False, 'msg': 'No existe la carpeta local'}
    data = rfc_get()
    if not len(data):
        return {'ok': False, 'msg': 'Sin bases de datos a respaldar'}
    for row in data:
        user = row[0].lower()
        path = _join(path_bk, '{}.bk'.format(user))
        args = f'{PG_DUMP} -d {user} -Fc -f "{path}"'
        _run(args)
    msg = 'Bases de datos respaldadas correctamente'
    return {'ok': True, 'msg': msg}
2020-01-07 00:32:48 -06:00
def now():
    """Current local datetime, truncated to whole seconds."""
    return datetime.datetime.now().replace(microsecond=0)


def get_days(date):
    """Whole days elapsed from *date* until now()."""
    delta = now() - date
    return delta.days
2020-01-21 23:25:13 -06:00
def get_pass():
    """Prompt twice for a password; return (ok, password_or_error_msg)."""
    first = getpass.getpass('Introduce la contraseña: ')
    second = getpass.getpass('Confirma la contraseña: ')
    if first != second:
        return False, 'Las contraseñas son diferentes'
    password = first.strip()
    if not password:
        return False, 'La contraseña es necesaria'
    return True, password
2020-12-29 21:53:51 -06:00
def xml_stamp(xml, auth):
    """Stamp *xml* with the PAC configured in auth['pac'].

    Returns {'ok': bool, 'error': str, ...provider response fields}.
    """
    if not DEBUG and not auth:
        return {'ok': False, 'error': 'Sin datos para timbrar'}

    result = {'ok': True, 'error': ''}
    pac = PACS[auth['pac']]()
    response = pac.stamp(xml, auth)
    if not response:
        result['ok'] = False
        result['error'] = pac.error
        return result

    result.update(response)
    return result
2020-01-22 22:30:11 -06:00
2020-01-27 22:39:39 -06:00
def xml_cancel(xml, auth, cert, name):
    """Cancel a stamped XML with PAC *name*.

    Returns (data, pac_response). WIP: the PAC call is not wired up yet,
    so this currently always reports the PAC's error state.

    Fixes: the original returned an undefined `result` (guaranteed
    NameError) and overwrote the real message with the debug leftover
    'Error Test'.
    """
    msg = 'Factura cancelada correctamente'
    data = {'ok': True, 'msg': msg, 'row': {'estatus': 'Cancelada'}}
    pac = PACS[name]()
    # TODO: call pac.cancel_xml(...) and map its status codes; until then
    # report failure with whatever error the PAC instance carries.
    result = None
    data['ok'] = False
    data['msg'] = pac.error
    return data, result
2021-01-10 19:18:13 -06:00
def get_client_balance(auth, rfc=''):
    """Query remaining stamp credits; 'p/e' when the PAC reports an error."""
    pac = PACS[auth['pac']]()
    balance = pac.client_balance(auth, rfc)
    if pac.error:
        balance = 'p/e'
    return balance
2020-12-30 22:45:57 -06:00
def get_cert(args):
    """Build a SATCertificate from base64 data-URL fields cer/key + password."""
    cer_data = base64.b64decode(args['cer'].split(',')[1])
    key_data = base64.b64decode(args['key'].split(',')[1])
    return SATCertificate(cer_data, key_data, args['contra'])
2020-12-30 22:45:57 -06:00
2020-12-31 12:01:41 -06:00
def make_xml(data, certificado):
    """Generate a CFDI XML from *data*, compute its cadena original via the
    SAT XSLT, sign it with *certificado* and return the sealed XML.

    Fix: the stylesheet file handle is now managed with a context manager
    (the original closed it manually after use); dead commented-out DEBUG
    code removed.
    """
    cert = SATCertificate(certificado.cer, certificado.key_enc.encode())
    cfdi = CFDI()
    xml = ET.parse(BytesIO(cfdi.get_xml(data).encode()))
    path_xslt = _join(PATHS['xslt'], 'cadena.xslt')
    with open(path_xslt, 'rb') as xslt:
        transform = ET.XSLT(ET.parse(xslt))
    cadena = str(transform(xml)).encode()
    stamp = cert.sign(cadena)
    return cfdi.add_sello(stamp, cert.cer_txt)
2020-12-31 15:08:49 -06:00
2021-01-02 22:23:33 -06:00
def get_pac_by_rfc(cfdi):
    """Identify which PAC stamped *cfdi* from the RfcProvCertif attribute."""
    tree = ET.fromstring(cfdi.encode())
    path = 'string(//cfdi:Complemento/tdf:TimbreFiscalDigital/@RfcProvCertif)'
    rfc_pac = tree.xpath(path, namespaces=NS_CFDI)
    return RFCS[rfc_pac]
2022-01-03 23:06:55 -06:00
def _cancel_with_cert(invoice, args, auth, certificado):
    """Cancel an invoice sending the sealing certificate material to the PAC.

    Returns a result dict: {'ok', 'msg', 'row'} plus 'date'/'acuse' on success.
    """
    cert = SATCertificate(certificado.cer, certificado.key_enc.encode())
    pac = PACS[auth['pac']]()

    # Recover the certificate password stored encrypted in the p12 blob.
    contra = ''
    try:
        contra = decrypt(bytes(certificado.p12).decode(), certificado.serie)
    except Exception as e:
        log.error(e)
        # Comercio Digital requires the password, so bail out; other PACs
        # continue with an empty password.
        if auth['pac'] == 'comercio':
            msg = 'Es necesario subir de nuevo los certificados de sello'
            data = {'ok': False, 'msg': msg, 'row': {}}
            return data

    info = {'cer': cert.cer_pem, 'key': cert.key_pem, 'cer_ori': cert.cer,
        'key_enc': certificado.key, 'pass': contra, 'args': args}
    result = pac.cancel(invoice.xml, info, auth)
    if pac.error:
        data = {'ok': False, 'msg': pac.error, 'row': {}}
        return data

    msg = 'Factura cancelada correctamente'
    data = {'ok': True, 'msg': msg, 'row': {'estatus': 'Cancelada'},
        'date': result['date'], 'acuse': result['acuse']}
    return data
2022-01-03 23:06:55 -06:00
def cancel_xml_sign(invoice, args, auth, certificado):
    """Cancel an invoice; currently always delegates to _cancel_with_cert.

    NOTE(review): the PAC branch check below is commented out, so this
    unconditional return makes everything after it dead code (kept from
    the signed-XML cancellation flow).
    """
    # ~ if auth['pac'] == 'finkok':
    return _cancel_with_cert(invoice, args, auth, certificado)

    # --- dead code below: signed-cancellation request built locally ---
    cert = SATCertificate(certificado.cer, certificado.key_enc.encode())
    pac = PACS[auth['pac']]()

    # Optional substitution folio for cancellation reason 01.
    folio_new = ''
    if args['uuid']:
        folio_new = f' FolioSustitucion="{args["uuid"]}"'

    data = {
        'rfc': certificado.rfc,
        'fecha': now().isoformat()[:19],
        'uuid': str(invoice.uuid).upper(),
        'motivo': args['reason'],
        'folio': folio_new,
    }

    # Fill the SAT cancellation template and sign it with the CSD.
    template = TEMPLATE_CANCEL.format(**data)
    tree = ET.fromstring(template.encode())
    sign_xml = cert.sign_xml(tree)
    # ~ print(sign_xml)

    result = pac.cancel_xml(sign_xml, auth, invoice.xml)
    if pac.error:
        data = {'ok': False, 'msg': pac.error, 'row': {}}
        return data

    msg = 'Factura cancelada correctamente'
    data = {'ok': True, 'msg': msg, 'row': {'estatus': 'Cancelada'},
        'date': result['date'], 'acuse': result['acuse']}
    return data
2021-02-09 22:34:15 -06:00
2021-02-09 22:44:26 -06:00
def _get_data_sat(xml):
    """Build the SAT status-query string (?re=&rr=&tt=&id=) from a stamped CFDI.

    Returns '' when the XML cannot be parsed. Generalized: the original
    only resolved the CFDI 3.3 namespace; 4.0 documents (cfd/4) are now
    handled too, consistent with CfdiToDict.NS_VERSION.
    """
    BF = 'string(//*[local-name()="{}"]/@{})'
    try:
        tree = ET.fromstring(xml.encode())
        emisor = receptor = ''
        # Try both CFDI namespaces; attribute case also changed over versions.
        for url in ('http://www.sat.gob.mx/cfd/3', 'http://www.sat.gob.mx/cfd/4'):
            ns = {'cfdi': url}
            emisor = escape(
                tree.xpath('string(//cfdi:Emisor/@rfc)', namespaces=ns) or
                tree.xpath('string(//cfdi:Emisor/@Rfc)', namespaces=ns)
            )
            receptor = escape(
                tree.xpath('string(//cfdi:Receptor/@rfc)', namespaces=ns) or
                tree.xpath('string(//cfdi:Receptor/@Rfc)', namespaces=ns)
            )
            if emisor:
                break
        total = tree.get('total') or tree.get('Total')
        uuid = tree.xpath(BF.format('TimbreFiscalDigital', 'UUID'))
    except Exception as e:
        return ''

    data = f'?re={emisor}&amp;rr={receptor}&amp;tt={total}&amp;id={uuid}'
    return data
2021-02-09 22:34:15 -06:00
def get_status_sat(xml):
    """Ask the SAT web service for the status of a stamped CFDI.

    Returns the Estado text (e.g. 'Vigente'/'Cancelado'), 'XML inválido'
    when the input can't be parsed, or 'Error: ...' on transport failures.
    """
    data = _get_data_sat(xml)
    if not data:
        return 'XML inválido'

    # SOAP 1.1 envelope for the Consulta operation; {} is the query string.
    data = """<?xml version="1.0" encoding="UTF-8"?>
    <soap:Envelope
        xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"
        xmlns:xsd="http://www.w3.org/2001/XMLSchema"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
        <soap:Header/>
        <soap:Body>
            <Consulta xmlns="http://tempuri.org/">
                <expresionImpresa>
                    {}
                </expresionImpresa>
            </Consulta>
        </soap:Body>
    </soap:Envelope>""".format(data)
    headers = {
        'SOAPAction': '"http://tempuri.org/IConsultaCFDIService/Consulta"',
        'Content-type': 'text/xml; charset="UTF-8"'
    }
    URL = 'https://consultaqr.facturaelectronica.sat.gob.mx/consultacfdiservice.svc'
    try:
        result = requests.post(URL, data=data, headers=headers)
        tree = ET.fromstring(result.text)
        # Estado lives in the SAT response namespace; match by local name.
        node = tree.xpath("//*[local-name() = 'Estado']")[0]
    except Exception as e:
        return 'Error: {}'.format(str(e))
    return node.text
2021-02-09 22:34:15 -06:00
2021-02-10 22:34:34 -06:00
def spaces(value):
    """Collapse runs of whitespace to single spaces, line by line."""
    lines = value.split('\n')
    return '\n'.join(' '.join(line.split()) for line in lines)
def to_slug(string):
    """ASCII-fold, lowercase and underscore-join *string*."""
    normalized = unicodedata.normalize('NFKD', string)
    ascii_text = normalized.encode('ascii', 'ignore').decode('ascii')
    return ascii_text.lower().replace(' ', '_')
def read_csv(path, args=None):
    """Read *path* with csv.DictReader and return the rows as dicts.

    *args* are extra DictReader kwargs; the default delimiter is '|'.
    Fix: the mutable default dict argument is replaced with the
    None-sentinel idiom (shared-state hazard).
    """
    if args is None:
        args = {'delimiter': '|'}
    with open(path) as f:
        reader = csv.DictReader(f, **args)
        rows = [r for r in reader]
    return rows
2021-06-07 22:45:00 -05:00
def _products_from_xml(rfc, data):
    """Parse an incoming supplier CFDI and map its conceptos to product rows.

    Returns {'status', 'error'[, 'data']}; 'error' is set when the
    receiver RFC does not match ours (skipped in DEBUG).
    Fix: signature keys are removed with pop(..., None) instead of del,
    and optional CFDI attributes (NoIdentificacion, Unidad, ...) are read
    with .get() so their absence cannot raise KeyError.
    """
    result = {'status': 'server', 'error': ''}
    cfdi = CfdiRead(data)
    if not DEBUG and rfc != cfdi.rfc_receptor:
        msg = f'El receptor no es: {rfc}'
        result['error'] = msg
        return result

    result['data'] = cfdi.data
    # Strip signature material the client does not need.
    for key in ('Certificado', 'NoCertificado', 'Sello'):
        result['data']['cfdi'].pop(key, None)
    for key in ('SelloCFD', 'SelloSAT'):
        result['data']['timbre'].pop(key, None)

    # Normalize the emisor keys to lowercase names used by the UI.
    emisor = result['data']['emisor']
    emisor['rfc'] = emisor.pop('Rfc')
    emisor['nombre'] = emisor.pop('Nombre')
    result['data']['emisor'] = emisor

    products = result['data']['conceptos']
    rows = []
    for p in products:
        row = {
            'key': p.get('NoIdentificacion'),
            'key_sat': p.get('ClaveProdServ'),
            'description': p.get('Descripcion'),
            'unit': p.get('Unidad'),
            'unit_value': p.get('ValorUnitario'),
            'import': p.get('Importe'),
            'cant': p.get('Cantidad'),
        }
        rows.append(row)
    result['data']['conceptos'] = rows
    return result
2021-06-07 22:45:00 -05:00
def save_file(path, data, modo='wb'):
    """Write *data* to *path* (mode *modo*); True on success, False otherwise.

    Fix: the bare `except:` also swallowed SystemExit/KeyboardInterrupt
    and hid the cause; narrow to Exception and log it.
    """
    try:
        with open(path, modo) as f:
            f.write(data)
        return True
    except Exception as e:
        log.error(e)
        return False
def _save_template(rfc, name, file_obj):
    """Validate the upload's extension against *name* and store the file
    under PATHS['USER'] as '<rfc><name>'. Returns {'status', 'ok'[, 'error']}."""
    result = {'status': 'server', 'ok': False}
    ext1 = name[-3:]
    ext2 = file_obj.filename.split('.')[-1].lower()
    if ext1 != ext2:
        result['error'] = f'Extensión incorrecta del archivo: {ext2}'
        return result

    path = _join(PATHS['USER'], f'{rfc.lower()}{name}')
    if save_file(path, file_obj.file.read()):
        result['ok'] = True
    return result
2021-06-29 19:07:55 -05:00
def upload_file(rfc, name, file_obj):
    """Dispatch an upload: 'productsadd' imports products from a CFDI XML,
    anything else is stored as a user template."""
    if name == 'productsadd':
        return _products_from_xml(rfc, file_obj.file.read())
    return _save_template(rfc, name, file_obj)
2021-12-30 11:56:22 -06:00
def get_qr(data, kind='svg', in_base64=False):
    """Render *data* as a QR code.

    Returns a BytesIO with the image, or a base64 string when requested.
    """
    buf = io.BytesIO()
    segno.make(data).save(buf, kind=kind, scale=8, border=2)
    if in_base64:
        return base64.b64encode(buf.getvalue()).decode()
    return buf