From c116468b0038253a656c6afb408d94be96125833 Mon Sep 17 00:00:00 2001
From: James Uejio
Date: Wed, 5 Aug 2020 09:49:26 -0400
Subject: [PATCH 1/8] Add functionality so that if one entry breaks during a
 batch, it will not crash the entire batch

---
 README.rst        | 62 +++++++++++++++++++++++++++++++++++++++++++++++
 ach/builder.py    | 41 +++++++++++++++++---------------
 ach/data_types.py |  5 +---
 example.py        | 25 +++++++++++++++++++
 setup.py          |  4 +--
 5 files changed, 112 insertions(+), 25 deletions(-)

diff --git a/README.rst b/README.rst
index c1f0a80..133ef5e 100644
--- a/README.rst
+++ b/README.rst
@@ -72,3 +72,65 @@ This returns the following NACHA file:
 9000001000001000000040037014587000000015000000000002213
 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
+
+
+Below is another example of what happens if validation fails for one entry.
+
+.. code:: python
+
+    from ach.builder import AchFile
+
+    settings = {
+        'immediate_dest' : '123456789', # Your bank's routing number
+        'immediate_org' : '123456789', # Bank assigned routing number
+        'immediate_dest_name' : 'YOUR BANK',
+        'immediate_org_name' : 'YOUR COMPANY',
+        'company_id' : '1234567890', # tax number
+    }
+
+    ach_file = AchFile('B', settings) # file Id mod
+
+    entries = [
+        {
+            'type' : '27',
+            'routing_number' : '********', # invalid
+            'account_number' : '********', # invalid
+            'amount' : '150.00',
+            'name' : 'Billy Holiday',
+        },
+        {
+            'type' : '22',
+            'routing_number' : '123232318',
+            'account_number' : '123123123',
+            'amount' : '12.13',
+            'name' : 'Rachel Welch',
+        },
+    ]
+
+    print(ach_file.add_batch('PPD', entries, credits=True, debits=True))
+
+This prints the list of (record, error) pairs for the entries that were skipped:
+
+::
+
+    [({'routing_number': '********', 'amount': '150.00', 'type': '27', 'account_number': '********', 'name': 'Billy Holiday'}, AchError('field needs to be numeric characters only',))]
+
+Here is the resulting ACH file with the failed entry skipped.
+
+..
code:: python + + print ach_file.render_to_string() + +:: + + 101 123456780 1234567802008071448B094101YOUR BANK YOUR COMPANY + 5200YOUR COMPANY 1234567890PPDPAYROLL 200808 1123456780000001 + 622123232318123123123 0000001213 RACHEL WELCH 0123456780000001 + 820000000100123232310000000000000000000012131234567890 123456780000001 + 9000001000001000000010012323231000000000000000000001213 + 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999 + 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999 + 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999 + 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999 + 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999 diff --git a/ach/builder.py b/ach/builder.py index b3d48f3..e84eae5 100644 --- a/ach/builder.py +++ b/ach/builder.py @@ -79,35 +79,38 @@ def add_batch(self, std_ent_cls_code, batch_entries=None, company_name=(company_name or self.settings['company_name'])[:16], ) - entries = [] + entries, failed_entry_errors = [], [] entry_counter = 1 for record in batch_entries: + try: + entry = EntryDetail( + std_ent_cls_code=std_ent_cls_code, + id_number=record.get('id_number', ''), + ) - entry = EntryDetail( - std_ent_cls_code=std_ent_cls_code, - id_number=record.get('id_number', ''), - ) - - entry.transaction_code = record.get('type') - entry.recv_dfi_id = record.get('routing_number') + entry.transaction_code = record.get('type') + entry.recv_dfi_id = record.get('routing_number') - if len(record['routing_number']) < 9: - entry.calc_check_digit() - else: - entry.check_digit = record['routing_number'][8] + if len(record['routing_number']) < 9: + entry.calc_check_digit() + else: + entry.check_digit = record['routing_number'][8] - entry.dfi_acnt_num = record['account_number'] - entry.amount = int(round(float(record['amount']) * 100)) - entry.ind_name = record['name'].upper()[:22] - entry.trace_num = self.settings['immediate_dest'][:8] \ - + entry.validate_numeric_field(entry_counter, 7) + entry.dfi_acnt_num = record['account_number'] + entry.amount = int(round(float(record['amount']) * 100)) + entry.ind_name = record['name'].upper()[:22] + entry.trace_num = self.settings['immediate_dest'][:8] \ + + entry.validate_numeric_field(entry_counter, 7) - entries.append((entry, record.get('addenda', []))) - entry_counter += 1 + entries.append((entry, record.get('addenda', []))) + entry_counter += 1 + except Exception as e: + failed_entry_errors.append((record, e)) self.batches.append(FileBatch(batch_header, entries)) self.set_control() + return failed_entry_errors def set_control(self): diff --git a/ach/data_types.py b/ach/data_types.py index 75c9568..000ab2f 100644 --- a/ach/data_types.py +++ b/ach/data_types.py @@ -62,10 +62,7 @@ def validate_alpha_numeric_field(self, field, length): """ str_length = str(length) - match = re.match( - r'([\w\s^!_@#$%&,*:./+\-]{1,' + str_length + '})', - field, - ) + match = re.match(r'([\w,\s]{1,' + str_length + '})', field) if match: if len(match.group(1)) < length: diff --git a/example.py b/example.py index 0112d04..5c20e2d 100644 --- a/example.py +++ b/example.py @@ -42,3 +42,28 @@ ach_file.add_batch('PPD', entries, credits=True, debits=True) print ach_file.render_to_string() + +# add_batch will skip failures and return them + +ach_file = AchFile('B', settings) #file Id mod + +entries = [ + { + 'type' : '27', + 
'routing_number' : '********', # invalid + 'account_number' : '********', # invalid + 'amount' : '150.00', + 'name' : 'Billy Holiday', + }, + { + 'type' : '22', + 'routing_number' : '123232318', + 'account_number' : '123123123', + 'amount' : '12.13', + 'name' : 'Rachel Welch', + }, +] + +print ach_file.add_batch('PPD', entries, credits=True, debits=True) + +print ach_file.render_to_string() diff --git a/setup.py b/setup.py index 7821b6f..a2c239c 100644 --- a/setup.py +++ b/setup.py @@ -3,8 +3,8 @@ setup( name='carta-ach', author='Carta, Inc.', - author_email='jared.hobbs@carta.com', - version='0.4.5', + author_email='james.uejio@carta.com', + version='0.4.6', packages=[ 'ach', ], From 1c23104f28e74dc6e68a400ee6547135ae2dab39 Mon Sep 17 00:00:00 2001 From: James Uejio Date: Mon, 10 Aug 2020 14:47:07 -0400 Subject: [PATCH 2/8] Updating setup.py to use more up to date setuptools library to produce wheel file to publish python package --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index a2c239c..e92b6f4 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -from distutils.core import setup +from setuptools import setup setup( name='carta-ach', From 64538d43e01a359d19f4d548920b9aec1743010e Mon Sep 17 00:00:00 2001 From: James Uejio Date: Tue, 6 Apr 2021 16:58:00 -0400 Subject: [PATCH 3/8] Run python black on repo --- .editorconfig | 29 ++ .gitignore | 2 +- ach/builder.py | 115 ++--- ach/data_types.py | 882 ++++++++++++++++++++----------------- ach/parser.py | 522 +++++++++++----------- example.py | 72 +-- pyproject.toml | 18 + setup.cfg | 0 setup.py | 18 +- tests/test_data_types.py | 46 +- tests/test_line_endings.py | 53 +-- 11 files changed, 930 insertions(+), 827 deletions(-) create mode 100644 .editorconfig create mode 100644 pyproject.toml create mode 100644 setup.cfg diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..5c4fa0f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,29 @@ +# This file is for unifying the coding style for different editors and IDEs. 
+# More information at http://EditorConfig.org +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +insert_final_newline = true +max_line_length = 120 +trim_trailing_whitespace = true + +[*.md] +trim_trailing_whitespace = false + +[*.py] +include_trailing_comma = True +indent_size = 4 +max_line_length = 120 +multi_line_output = 5 +not_skip = __init__.py +sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER + +[*.yml,*.yaml] +indent_size = 2 + +[Makefile] +indent_style = tab diff --git a/.gitignore b/.gitignore index c29fa58..814dc42 100644 --- a/.gitignore +++ b/.gitignore @@ -30,4 +30,4 @@ pip-log.txt #Vim swp *.swp .idea -.* +.DS_Store diff --git a/ach/builder.py b/ach/builder.py index e84eae5..6958baa 100644 --- a/ach/builder.py +++ b/ach/builder.py @@ -2,8 +2,12 @@ from datetime import datetime, timedelta from .data_types import ( - Header, FileControl, BatchHeader, - BatchControl, EntryDetail, AddendaRecord + Header, + FileControl, + BatchHeader, + BatchControl, + EntryDetail, + AddendaRecord, ) @@ -23,14 +27,16 @@ def __init__(self, file_id_mod, settings): try: company_name = settings.get( - 'company_name', - settings['immediate_org_name'], + "company_name", + settings["immediate_org_name"], ) - self.settings['company_name'] = company_name + self.settings["company_name"] = company_name self.header = Header( - settings['immediate_dest'], - settings['immediate_org'], file_id_mod, - settings['immediate_dest_name'], settings['immediate_org_name'] + settings["immediate_dest"], + settings["immediate_org"], + file_id_mod, + settings["immediate_dest_name"], + settings["immediate_org_name"], ) except KeyError: raise Exception( @@ -40,9 +46,17 @@ def __init__(self, file_id_mod, settings): self.batches = list() - def add_batch(self, std_ent_cls_code, batch_entries=None, - credits=True, debits=False, eff_ent_date=None, - company_id=None, entry_desc=None, company_name=None): + def add_batch( + self, + std_ent_cls_code, + batch_entries=None, + credits=True, + debits=False, + eff_ent_date=None, + company_id=None, + entry_desc=None, + company_name=None, + ): """ Use this to add batches to the file. 
For valid std_ent_cls_codes see: http://en.wikipedia.org/wiki/Automated_Clearing_House#SEC_codes @@ -60,23 +74,23 @@ def add_batch(self, std_ent_cls_code, batch_entries=None, eff_ent_date = datetime.today() + timedelta(days=1) if credits and debits: - serv_cls_code = '200' + serv_cls_code = "200" elif credits: - serv_cls_code = '220' + serv_cls_code = "220" elif debits: - serv_cls_code = '225' + serv_cls_code = "225" batch_header = BatchHeader( serv_cls_code=serv_cls_code, batch_id=batch_count, - company_id=company_id or self.settings['company_id'], + company_id=company_id or self.settings["company_id"], std_ent_cls_code=std_ent_cls_code, entry_desc=entry_desc, - desc_date='', - eff_ent_date=eff_ent_date.strftime('%y%m%d'), # YYMMDD - orig_stat_code='1', - orig_dfi_id=self.settings['immediate_dest'][:8], - company_name=(company_name or self.settings['company_name'])[:16], + desc_date="", + eff_ent_date=eff_ent_date.strftime("%y%m%d"), # YYMMDD + orig_stat_code="1", + orig_dfi_id=self.settings["immediate_dest"][:8], + company_name=(company_name or self.settings["company_name"])[:16], ) entries, failed_entry_errors = [], [] @@ -86,24 +100,23 @@ def add_batch(self, std_ent_cls_code, batch_entries=None, try: entry = EntryDetail( std_ent_cls_code=std_ent_cls_code, - id_number=record.get('id_number', ''), + id_number=record.get("id_number", ""), ) - entry.transaction_code = record.get('type') - entry.recv_dfi_id = record.get('routing_number') + entry.transaction_code = record.get("type") + entry.recv_dfi_id = record.get("routing_number") - if len(record['routing_number']) < 9: + if len(record["routing_number"]) < 9: entry.calc_check_digit() else: - entry.check_digit = record['routing_number'][8] + entry.check_digit = record["routing_number"][8] - entry.dfi_acnt_num = record['account_number'] - entry.amount = int(round(float(record['amount']) * 100)) - entry.ind_name = record['name'].upper()[:22] - entry.trace_num = self.settings['immediate_dest'][:8] \ - + entry.validate_numeric_field(entry_counter, 7) + entry.dfi_acnt_num = record["account_number"] + entry.amount = int(round(float(record["amount"]) * 100)) + entry.ind_name = record["name"].upper()[:22] + entry.trace_num = self.settings["immediate_dest"][:8] + entry.validate_numeric_field(entry_counter, 7) - entries.append((entry, record.get('addenda', []))) + entries.append((entry, record.get("addenda", []))) entry_counter += 1 except Exception as e: failed_entry_errors.append((record, e)) @@ -122,8 +135,12 @@ def set_control(self): credit_amount = self.get_credit_amount(self.batches) self.control = FileControl( - batch_count, block_count, entadd_count, - entry_hash, debit_amount, credit_amount + batch_count, + block_count, + entadd_count, + entry_hash, + debit_amount, + credit_amount, ) def get_block_count(self, batches): @@ -138,8 +155,7 @@ def get_lines(self, batches): entadd_count = self.get_entadd_count(batches) - lines = header_count + control_count + batch_header_count \ - + batch_footer_count + entadd_count + lines = header_count + control_count + batch_header_count + batch_footer_count + entadd_count return lines @@ -177,16 +193,15 @@ def get_credit_amount(self, batches): credit_amount = 0 for batch in batches: - credit_amount = credit_amount + \ - int(batch.batch_control.credit_amount) + credit_amount = credit_amount + int(batch.batch_control.credit_amount) return credit_amount def get_nines(self, rows, line_ending): - nines = '' + nines = "" for i in range(rows): - nines += '9'*94 + nines += "9" * 94 if i == rows - 1: continue nines 
+= line_ending @@ -195,12 +210,12 @@ def get_nines(self, rows, line_ending): def get_entry_desc(self, std_ent_cls_code): - if std_ent_cls_code == 'PPD': - entry_desc = 'PAYROLL' - elif std_ent_cls_code == 'CCD': - entry_desc = 'DUES' + if std_ent_cls_code == "PPD": + entry_desc = "PAYROLL" + elif std_ent_cls_code == "CCD": + entry_desc = "DUES" else: - entry_desc = 'OTHER' + entry_desc = "OTHER" return entry_desc @@ -252,7 +267,7 @@ def __init__(self, batch_header, entries): entadd_count += len(addenda) self.entries.append(FileEntry(entry, addenda)) - #set up batch_control + # set up batch_control batch_control = BatchControl(self.batch_header.serv_cls_code) @@ -285,8 +300,7 @@ def get_debit_amount(self, entries): debit_amount = 0 for entry in entries: - if str(entry.entry_detail.transaction_code) in \ - ['27', '37', '28', '38']: + if str(entry.entry_detail.transaction_code) in ["27", "37", "28", "38"]: debit_amount = debit_amount + int(entry.entry_detail.amount) return debit_amount @@ -295,8 +309,7 @@ def get_credit_amount(self, entries): credit_amount = 0 for entry in entries: - if str(entry.entry_detail.transaction_code) in \ - ['22', '32', '23', '33']: + if str(entry.entry_detail.transaction_code) in ["22", "32", "23", "33"]: credit_amount += int(entry.entry_detail.amount) return credit_amount @@ -339,9 +352,9 @@ def __init__(self, entry_detail, addenda_record=[]): self.addenda_record.append( AddendaRecord( self.entry_detail.std_ent_cls_code, - pmt_rel_info=addenda.get('payment_related_info').upper(), + pmt_rel_info=addenda.get("payment_related_info").upper(), add_seq_num=index + 1, - ent_det_seq_num=entry_detail.trace_num[-7:] + ent_det_seq_num=entry_detail.trace_num[-7:], ) ) diff --git a/ach/data_types.py b/ach/data_types.py index 000ab2f..b5a60ad 100644 --- a/ach/data_types.py +++ b/ach/data_types.py @@ -23,10 +23,10 @@ def make_space(self, spaces=1): Return string with x number of spaces Defaults to 1 """ - space_string = '' + space_string = "" for i in range(spaces): - space_string += ' ' + space_string += " " return space_string @@ -47,10 +47,10 @@ def make_zero(self, zeros=1): Return string with x number of zeros Defaults to 1 """ - zero_string = '' + zero_string = "" for i in range(zeros): - zero_string += '0' + zero_string += "0" return zero_string @@ -62,12 +62,11 @@ def validate_alpha_numeric_field(self, field, length): """ str_length = str(length) - match = re.match(r'([\w,\s]{1,' + str_length + '})', field) + match = re.match(r"([\w,\s]{1," + str_length + "})", field) if match: if len(match.group(1)) < length: - field = match.group(1) + self.make_space( - length - len(match.group(1))) + field = match.group(1) + self.make_space(length - len(match.group(1))) else: field = match.group(1) else: @@ -99,7 +98,7 @@ def validate_binary_field(self, field): Validates binary string field (either '1' or '0') """ - if field not in ['1', '0']: + if field not in ["1", "0"]: raise AchError("filed not '1' or '0'") return field @@ -109,30 +108,43 @@ class Header(Ach): Creates our File Header record of the nacha file """ - record_type_code = '1' - priority_code = '01' - record_size = '094' - blk_factor = '10' - format_code = '1' + record_type_code = "1" + priority_code = "01" + record_size = "094" + blk_factor = "10" + format_code = "1" alpha_numeric_fields = [ - 'immediate_dest', 'immediate_org', 'file_id_mod', 'im_dest_name', - 'im_orgn_name', 'reference_code', 'file_crt_date', 'file_crt_time' + "immediate_dest", + "immediate_org", + "file_id_mod", + "im_dest_name", + "im_orgn_name", + 
"reference_code", + "file_crt_date", + "file_crt_time", ] field_lengths = { - 'immediate_dest': 10, - 'immediate_org': 10, - 'file_id_mod': 1, - 'im_dest_name': 23, - 'im_orgn_name': 23, - 'reference_code': 8, - 'file_crt_date': 6, - 'file_crt_time': 4, + "immediate_dest": 10, + "immediate_org": 10, + "file_id_mod": 1, + "im_dest_name": 23, + "im_orgn_name": 23, + "reference_code": 8, + "file_crt_date": 6, + "file_crt_time": 4, } - def __init__(self, immediate_dest='', immediate_org='', file_id_mod='A', - im_dest_name='', im_orgn_name='', reference_code=''): + def __init__( + self, + immediate_dest="", + immediate_org="", + file_id_mod="A", + im_dest_name="", + im_orgn_name="", + reference_code="", + ): """ Initializes all values needed for our header row @@ -142,37 +154,32 @@ def __init__(self, immediate_dest='', immediate_org='', file_id_mod='A', self.immediate_dest = self.make_right_justified(immediate_dest, 10) self.immediate_org = self.make_right_justified(immediate_org, 10) - self.file_crt_date = date.strftime('%y%m%d') - self.file_crt_time = date.strftime('%H%M') + self.file_crt_date = date.strftime("%y%m%d") + self.file_crt_time = date.strftime("%H%M") self.file_id_mod = self.validate_file_id_mod(file_id_mod) self.im_dest_name = self.validate_alpha_numeric_field(im_dest_name, 23) self.im_orgn_name = self.validate_alpha_numeric_field(im_orgn_name, 23) - if reference_code != '': - self.reference_code = self.validate_alpha_numeric_field( - reference_code, 8) + if reference_code != "": + self.reference_code = self.validate_alpha_numeric_field(reference_code, 8) else: self.reference_code = self.make_space(8) def __setattr__(self, name, value): if name in self.alpha_numeric_fields: - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name] - ) - elif name == 'file_id_mod': + value = self.validate_alpha_numeric_field(value, self.field_lengths[name]) + elif name == "file_id_mod": value = self.validate_file_id_mod(value) else: - raise AchError( - '%s not in alpha numeric field list' % name - ) + raise AchError("%s not in alpha numeric field list" % name) super(Header, self).__setattr__(name, value) def validate_file_id_mod(self, file_id_mod): - ''' + """ Validates the file ID modifier. 
It has to be ascii_uppercase and one character in length - ''' + """ if file_id_mod not in string.ascii_uppercase and len(file_id_mod) != 1: raise AchError("Invalid file_id_mod") @@ -183,19 +190,21 @@ def get_row(self): returns concatenated string of all parameters in nacha file """ - return self.record_type_code +\ - self.priority_code +\ - self.immediate_dest +\ - self.immediate_org +\ - self.file_crt_date +\ - self.file_crt_time +\ - self.file_id_mod +\ - self.record_size +\ - self.blk_factor +\ - self.format_code +\ - self.im_dest_name +\ - self.im_orgn_name +\ - self.reference_code + return ( + self.record_type_code + + self.priority_code + + self.immediate_dest + + self.immediate_org + + self.file_crt_date + + self.file_crt_time + + self.file_id_mod + + self.record_size + + self.blk_factor + + self.format_code + + self.im_dest_name + + self.im_orgn_name + + self.reference_code + ) def get_count(self): """ @@ -211,28 +220,40 @@ class FileControl(Ach): Appears at the end of file """ - record_type_code = '9' + record_type_code = "9" numeric_fields = [ - 'batch_count', 'block_count', 'entadd_count', 'entry_hash', - 'debit_amount', 'credit_amount' + "batch_count", + "block_count", + "entadd_count", + "entry_hash", + "debit_amount", + "credit_amount", ] - alpha_numeric_fields = ['reserved', ] + alpha_numeric_fields = [ + "reserved", + ] field_lengths = { - 'batch_count': 6, - 'block_count': 6, - 'entadd_count': 8, - 'entry_hash': 10, - 'debit_amount': 12, - 'credit_amount': 12, - 'reserved': 39, + "batch_count": 6, + "block_count": 6, + "entadd_count": 8, + "entry_hash": 10, + "debit_amount": 12, + "credit_amount": 12, + "reserved": 39, } - def __init__(self, batch_count, block_count, - entadd_count, entry_hash, debit_amount, - credit_amount): + def __init__( + self, + batch_count, + block_count, + entadd_count, + entry_hash, + debit_amount, + credit_amount, + ): """ Initializes all the values we need for our file control record """ @@ -247,30 +268,26 @@ def __init__(self, batch_count, block_count, def __setattr__(self, name, value): if name in self.numeric_fields: - value = self.validate_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_numeric_field(value, self.field_lengths[name]) elif name in self.alpha_numeric_fields: - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_alpha_numeric_field(value, self.field_lengths[name]) else: - raise AchError( - '%s not in numeric field list' % name - ) + raise AchError("%s not in numeric field list" % name) super(FileControl, self).__setattr__(name, value) def get_row(self): - return self.record_type_code +\ - self.batch_count +\ - self.block_count +\ - self.entadd_count +\ - self.entry_hash +\ - self.debit_amount +\ - self.credit_amount +\ - self.reserved + return ( + self.record_type_code + + self.batch_count + + self.block_count + + self.entadd_count + + self.entry_hash + + self.debit_amount + + self.credit_amount + + self.reserved + ) def get_count(self): return len(self.get_row()) @@ -278,40 +295,68 @@ def get_count(self): class BatchHeader(Ach): - record_type_code = '5' - - std_ent_cls_code_list = ['ARC', 'PPD', 'CTX', 'POS', 'WEB', - 'BOC', 'TEL', 'MTE', 'SHR', 'CCD', - 'CIE', 'POP', 'RCK'] + record_type_code = "5" + + std_ent_cls_code_list = [ + "ARC", + "PPD", + "CTX", + "POS", + "WEB", + "BOC", + "TEL", + "MTE", + "SHR", + "CCD", + "CIE", + "POP", + "RCK", + ] - serv_cls_code_list = ['200', '220', '225'] + serv_cls_code_list = ["200", "220", "225"] - 
numeric_fields = ['orig_dfi_id', 'batch_id', - 'eff_ent_date', 'serv_cls_code'] + numeric_fields = ["orig_dfi_id", "batch_id", "eff_ent_date", "serv_cls_code"] - alpha_numeric_fields = ['company_name', 'cmpy_dis_data', 'company_id', - 'std_ent_cls_code', 'entry_desc', 'desc_date', - 'orig_stat_code', 'settlement_date'] + alpha_numeric_fields = [ + "company_name", + "cmpy_dis_data", + "company_id", + "std_ent_cls_code", + "entry_desc", + "desc_date", + "orig_stat_code", + "settlement_date", + ] field_lengths = { - 'serv_cls_code': 3, - 'company_name': 16, - 'cmpy_dis_data': 20, - 'company_id': 10, - 'std_ent_cls_code': 3, - 'entry_desc': 10, - 'desc_date': 6, - 'eff_ent_date': 6, - 'settlement_date': 3, - 'orig_stat_code': 1, - 'orig_dfi_id': 8, - 'batch_id': 7, + "serv_cls_code": 3, + "company_name": 16, + "cmpy_dis_data": 20, + "company_id": 10, + "std_ent_cls_code": 3, + "entry_desc": 10, + "desc_date": 6, + "eff_ent_date": 6, + "settlement_date": 3, + "orig_stat_code": 1, + "orig_dfi_id": 8, + "batch_id": 7, } - def __init__(self, serv_cls_code='220', company_name='', cmpy_dis_data='', - company_id='', std_ent_cls_code='PPD', entry_desc='', - desc_date='', eff_ent_date='', orig_stat_code='', - orig_dfi_id='', batch_id=''): + def __init__( + self, + serv_cls_code="220", + company_name="", + cmpy_dis_data="", + company_id="", + std_ent_cls_code="PPD", + entry_desc="", + desc_date="", + eff_ent_date="", + orig_stat_code="", + orig_dfi_id="", + batch_id="", + ): """ Initializes and validates the values for our Batch Header rows. We use 220 and PPD as the default values for serv_cls_code @@ -323,10 +368,10 @@ def __init__(self, serv_cls_code='220', company_name='', cmpy_dis_data='', self.settlement_date = self.make_space(3) for key in args: - if key == 'self': + if key == "self": continue - if args[key] != '': + if args[key] != "": self.__setattr__(key, args[key]) elif key in self.numeric_fields: @@ -338,45 +383,39 @@ def __init__(self, serv_cls_code='220', company_name='', cmpy_dis_data='', def __setattr__(self, name, value): if name in self.numeric_fields: - if name == 'serv_cls_code' \ - and str(value) not in self.serv_cls_code_list: + if name == "serv_cls_code" and str(value) not in self.serv_cls_code_list: raise AchError("%s not in serv_cls_code_list" % value) - value = self.validate_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_numeric_field(value, self.field_lengths[name]) elif name in self.alpha_numeric_fields: - if name == 'std_ent_cls_code' \ - and str(value) not in self.std_ent_cls_code_list: + if name == "std_ent_cls_code" and str(value) not in self.std_ent_cls_code_list: raise AchError("%s not in std_ent_cls_code_list" % value) - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_alpha_numeric_field(value, self.field_lengths[name]) else: - raise AchError( - '%s not in numeric or alpha numeric fields list' % name - ) + raise AchError("%s not in numeric or alpha numeric fields list" % name) super(BatchHeader, self).__setattr__(name, value) def get_row(self): - return self.record_type_code +\ - self.serv_cls_code +\ - self.company_name +\ - self.cmpy_dis_data +\ - self.company_id +\ - self.std_ent_cls_code +\ - self.entry_desc +\ - self.desc_date +\ - self.eff_ent_date +\ - self.settlement_date +\ - self.orig_stat_code +\ - self.orig_dfi_id +\ - self.batch_id + return ( + self.record_type_code + + self.serv_cls_code + + self.company_name + + self.cmpy_dis_data + + self.company_id + + 
self.std_ent_cls_code + + self.entry_desc + + self.desc_date + + self.eff_ent_date + + self.settlement_date + + self.orig_stat_code + + self.orig_dfi_id + + self.batch_id + ) def get_count(self): return len(self.get_row()) @@ -384,30 +423,45 @@ def get_count(self): class BatchControl(Ach): - record_type_code = '8' + record_type_code = "8" - numeric_fields = ['serv_cls_code', 'entadd_count', 'entry_hash', - 'debit_amount', 'credit_amount', 'orig_dfi_id', - 'batch_id'] + numeric_fields = [ + "serv_cls_code", + "entadd_count", + "entry_hash", + "debit_amount", + "credit_amount", + "orig_dfi_id", + "batch_id", + ] - alpha_numeric_fields = ['company_id', 'mesg_auth_code', 'reserved'] + alpha_numeric_fields = ["company_id", "mesg_auth_code", "reserved"] field_lengths = { - 'serv_cls_code': 3, - 'entadd_count': 6, - 'entry_hash': 10, - 'debit_amount': 12, - 'credit_amount': 12, - 'company_id': 10, - 'mesg_auth_code': 19, - 'reserved': 6, - 'orig_dfi_id': 8, - 'batch_id': 7, + "serv_cls_code": 3, + "entadd_count": 6, + "entry_hash": 10, + "debit_amount": 12, + "credit_amount": 12, + "company_id": 10, + "mesg_auth_code": 19, + "reserved": 6, + "orig_dfi_id": 8, + "batch_id": 7, } - def __init__(self, serv_cls_code='220', entadd_count='', entry_hash='', - debit_amount='', credit_amount='', company_id='', - orig_dfi_id='', batch_id='', mesg_auth_code=''): + def __init__( + self, + serv_cls_code="220", + entadd_count="", + entry_hash="", + debit_amount="", + credit_amount="", + company_id="", + orig_dfi_id="", + batch_id="", + mesg_auth_code="", + ): """ Initializes and validates the batch control record """ @@ -416,11 +470,11 @@ def __init__(self, serv_cls_code='220', entadd_count='', entry_hash='', self.reserved = self.make_space(6) for key in args: - if key == 'self': + if key == "self": continue - if args[key] != '': - if key == 'debit_amount' or key == 'credit_amount': + if args[key] != "": + if key == "debit_amount" or key == "credit_amount": self.__setattr__(key, int(100 * args[key])) else: self.__setattr__(key, args[key]) @@ -433,33 +487,29 @@ def __init__(self, serv_cls_code='220', entadd_count='', entry_hash='', def __setattr__(self, name, value): if name in self.numeric_fields: - value = self.validate_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_numeric_field(value, self.field_lengths[name]) elif name in self.alpha_numeric_fields: - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_alpha_numeric_field(value, self.field_lengths[name]) else: - raise AchError( - "%s not in numeric_fields or alpha_numeric_fields" % name - ) + raise AchError("%s not in numeric_fields or alpha_numeric_fields" % name) super(BatchControl, self).__setattr__(name, value) def get_row(self): - return self.record_type_code +\ - self.serv_cls_code +\ - self.entadd_count +\ - self.entry_hash +\ - self.debit_amount +\ - self.credit_amount +\ - self.company_id +\ - self.mesg_auth_code +\ - self.reserved +\ - self.orig_dfi_id +\ - self.batch_id + return ( + self.record_type_code + + self.serv_cls_code + + self.entadd_count + + self.entry_hash + + self.debit_amount + + self.credit_amount + + self.company_id + + self.mesg_auth_code + + self.reserved + + self.orig_dfi_id + + self.batch_id + ) def get_count(self): return len(self.get_row()) @@ -470,57 +520,109 @@ class EntryDetail(Ach): Object represents a single Entry Detail record of an ACH file """ - record_type_code = '6' - - std_ent_cls_code_list = ['ARC', 'PPD', 'CTX', 'POS', 'WEB', - 
'BOC', 'TEL', 'MTE', 'SHR', 'CCD', - 'CIE', 'POP', 'RCK'] + record_type_code = "6" + + std_ent_cls_code_list = [ + "ARC", + "PPD", + "CTX", + "POS", + "WEB", + "BOC", + "TEL", + "MTE", + "SHR", + "CCD", + "CIE", + "POP", + "RCK", + ] - numeric_fields = ['transaction_code', 'recv_dfi_id', 'check_digit', - 'amount', 'num_add_recs', 'card_exp_date', 'doc_ref_num', - 'ind_card_acct_num', 'card_tr_typ_code_shr', - 'add_rec_ind', 'trace_num'] + numeric_fields = [ + "transaction_code", + "recv_dfi_id", + "check_digit", + "amount", + "num_add_recs", + "card_exp_date", + "doc_ref_num", + "ind_card_acct_num", + "card_tr_typ_code_shr", + "add_rec_ind", + "trace_num", + ] - alpha_numeric_fields = ['dfi_acnt_num', 'chk_serial_num', 'ind_name', - 'disc_data', 'id_number', 'recv_cmpy_name', - 'terminal_city', 'terminal_state', 'reserved', - 'card_tr_typ_code_pos', 'pmt_type_code'] + alpha_numeric_fields = [ + "dfi_acnt_num", + "chk_serial_num", + "ind_name", + "disc_data", + "id_number", + "recv_cmpy_name", + "terminal_city", + "terminal_state", + "reserved", + "card_tr_typ_code_pos", + "pmt_type_code", + ] field_lengths = { - 'transaction_code' : 2, - 'recv_dfi_id' : [8, 9], - 'check_digit' : 1, - 'dfi_acnt_num' : 17, - 'amount' : 10, - 'chk_serial_num' : [9, #POP - 15,], #ARC, BOC - 'ind_name' : [15, #CIE, MTE - 22,], #ARC, BOC, CCD, PPD, TEL, POP, POS, WEB - 'disc_data' : 2, - 'id_number' : 15, - 'ind_id' : 22, - 'num_add_recs' : 4, - 'recv_cmpy_name' : 16, - 'reserved' : 2, - 'terminal_city' : 4, - 'terminal_state' : 2, - 'card_tr_typ_code_pos' : 2, - 'card_tr_typ_code_shr' : 2, - 'card_exp_date' : 4, - 'doc_ref_num' : 11, - 'ind_card_acct_num' : 22, - 'pmt_type_code' : 2, - 'add_rec_ind' : 1, - 'trace_num' : 15, + "transaction_code": 2, + "recv_dfi_id": [8, 9], + "check_digit": 1, + "dfi_acnt_num": 17, + "amount": 10, + "chk_serial_num": [ + 9, # POP + 15, + ], # ARC, BOC + "ind_name": [ + 15, # CIE, MTE + 22, + ], # ARC, BOC, CCD, PPD, TEL, POP, POS, WEB + "disc_data": 2, + "id_number": 15, + "ind_id": 22, + "num_add_recs": 4, + "recv_cmpy_name": 16, + "reserved": 2, + "terminal_city": 4, + "terminal_state": 2, + "card_tr_typ_code_pos": 2, + "card_tr_typ_code_shr": 2, + "card_exp_date": 4, + "doc_ref_num": 11, + "ind_card_acct_num": 22, + "pmt_type_code": 2, + "add_rec_ind": 1, + "trace_num": 15, } - def __init__(self, std_ent_cls_code='PPD', transaction_code='', recv_dfi_id='', - check_digit='', amount='', num_add_recs='', card_exp_date='', - doc_ref_num='', ind_card_acct_num='', card_tr_typ_code_shr='', - card_tr_typ_code_pos='', trace_num='', dfi_acnt_num='', - ind_name='', disc_data='', id_number='', recv_cmpy_name='', - chk_serial_num='', terminal_city='', terminal_state='', - pmt_type_code='', add_rec_ind=''): + def __init__( + self, + std_ent_cls_code="PPD", + transaction_code="", + recv_dfi_id="", + check_digit="", + amount="", + num_add_recs="", + card_exp_date="", + doc_ref_num="", + ind_card_acct_num="", + card_tr_typ_code_shr="", + card_tr_typ_code_pos="", + trace_num="", + dfi_acnt_num="", + ind_name="", + disc_data="", + id_number="", + recv_cmpy_name="", + chk_serial_num="", + terminal_city="", + terminal_state="", + pmt_type_code="", + add_rec_ind="", + ): """ Initialize and validate the values in Entry Detail record """ @@ -530,32 +632,26 @@ def __init__(self, std_ent_cls_code='PPD', transaction_code='', recv_dfi_id='', fields = locals().copy() for key in fields: - if key == 'self': + if key == "self": continue - if fields[key] != '': + if fields[key] != "": self.__setattr__(key, 
fields[key]) - elif key in ['chk_serial_num', 'ind_name']: - if self.std_ent_cls_code in ['CIE', 'MTE', 'POP']: - self.__setattr__( - key, self.make_space(self.field_lengths[key][0]) - ) + elif key in ["chk_serial_num", "ind_name"]: + if self.std_ent_cls_code in ["CIE", "MTE", "POP"]: + self.__setattr__(key, self.make_space(self.field_lengths[key][0])) else: - self.__setattr__( - key, self.make_space(self.field_lengths[key][1]) - ) + self.__setattr__(key, self.make_space(self.field_lengths[key][1])) elif key in self.numeric_fields: - if key == 'recv_dfi_id': + if key == "recv_dfi_id": self.__setattr__(key, self.make_zero(self.field_lengths[key][0])) else: self.__setattr__(key, self.make_zero(self.field_lengths[key])) elif key in self.alpha_numeric_fields: - self.__setattr__( - key, self.make_space(self.field_lengths[key]) - ) + self.__setattr__(key, self.make_space(self.field_lengths[key])) def __setattr__(self, name, value): """ @@ -565,34 +661,23 @@ def __setattr__(self, name, value): if name in self.alpha_numeric_fields: # Special handling for Indvidiual/Company name field - if name == 'ind_name' and self.std_ent_cls_code in ['CIE', 'MTE']: - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name][0] - ) - elif name == 'ind_name': - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name][1] - ) + if name == "ind_name" and self.std_ent_cls_code in ["CIE", "MTE"]: + value = self.validate_alpha_numeric_field(value, self.field_lengths[name][0]) + elif name == "ind_name": + value = self.validate_alpha_numeric_field(value, self.field_lengths[name][1]) # Special handling for Check serial number field - elif name == 'chk_serial_num' and \ - self.std_ent_cls_code_list == 'POP': - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name][0] - ) - elif name == 'chk_serial_num': - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name][1] - ) - - #The rest + elif name == "chk_serial_num" and self.std_ent_cls_code_list == "POP": + value = self.validate_alpha_numeric_field(value, self.field_lengths[name][0]) + elif name == "chk_serial_num": + value = self.validate_alpha_numeric_field(value, self.field_lengths[name][1]) + + # The rest else: - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_alpha_numeric_field(value, self.field_lengths[name]) elif name in self.numeric_fields: - if name == 'recv_dfi_id': + if name == "recv_dfi_id": try: # try 8 digits first value = self.validate_numeric_field(value, self.field_lengths[name][0]) @@ -600,90 +685,60 @@ def __setattr__(self, name, value): # now try to validate it 9 instead value = self.validate_numeric_field(value, self.field_lengths[name][1]) else: - value = self.validate_numeric_field( value, self.field_lengths[name] ) + value = self.validate_numeric_field(value, self.field_lengths[name]) - elif name == 'std_ent_cls_code' and \ - value in self.std_ent_cls_code_list: + elif name == "std_ent_cls_code" and value in self.std_ent_cls_code_list: pass else: - raise AchError( - "%s not in numeric_fields or alpha_numeric_fields" % name - ) + raise AchError("%s not in numeric_fields or alpha_numeric_fields" % name) super(EntryDetail, self).__setattr__(name, value) def get_row(self): - ret_string = '' + ret_string = "" - ret_string = self.record_type_code +\ - self.transaction_code +\ - self.recv_dfi_id + ret_string = self.record_type_code + self.transaction_code + self.recv_dfi_id if len(self.recv_dfi_id) < 9: ret_string += 
self.check_digit - ret_string += self.dfi_acnt_num +\ - self.amount - - if self.std_ent_cls_code in ['ARC', 'BOC']: - ret_string += self.chk_serial_num +\ - self.ind_name +\ - self.disc_data - - elif self.std_ent_cls_code in ['CCD', 'PPD', 'TEL']: - ret_string += self.id_number +\ - self.ind_name +\ - self.disc_data - - elif self.std_ent_cls_code == 'CIE': - ret_string += self.ind_name +\ - self.ind_id +\ - self.disc_data - - elif self.std_ent_cls_code == 'CTX': - ret_string += self.id_number +\ - self.num_add_recs +\ - self.recv_cmpy_name +\ - self.reserved +\ - self.disc_data - - elif self.std_ent_cls_code == 'MTE': - ret_string += self.ind_name +\ - self.ind_id +\ - self.disc_data - - elif self.std_ent_cls_code == 'POP': - ret_string += self.chk_serial_num +\ - self.terminal_city +\ - self.terminal_state +\ - self.ind_name +\ - self.disc_data - - elif self.std_ent_cls_code == 'POS': - ret_string += self.id_number +\ - self.ind_name +\ - self.card_tr_typ_code_pos - - elif self.std_ent_cls_code == 'SHR': - ret_string += self.card_exp_date +\ - self.doc_ref_num +\ - self.ind_card_acct_num +\ - self.card_tr_typ_code_shr - - elif self.std_ent_cls_code == 'RCK': - ret_string += self.chk_serial_num +\ - self.ind_name +\ - self.disc_data - - elif self.std_ent_cls_code == 'WEB': - ret_string += self.id_number +\ - self.ind_name +\ - self.pmt_type_code - - ret_string += self.add_rec_ind +\ - self.trace_num + ret_string += self.dfi_acnt_num + self.amount + + if self.std_ent_cls_code in ["ARC", "BOC"]: + ret_string += self.chk_serial_num + self.ind_name + self.disc_data + + elif self.std_ent_cls_code in ["CCD", "PPD", "TEL"]: + ret_string += self.id_number + self.ind_name + self.disc_data + + elif self.std_ent_cls_code == "CIE": + ret_string += self.ind_name + self.ind_id + self.disc_data + + elif self.std_ent_cls_code == "CTX": + ret_string += self.id_number + self.num_add_recs + self.recv_cmpy_name + self.reserved + self.disc_data + + elif self.std_ent_cls_code == "MTE": + ret_string += self.ind_name + self.ind_id + self.disc_data + + elif self.std_ent_cls_code == "POP": + ret_string += ( + self.chk_serial_num + self.terminal_city + self.terminal_state + self.ind_name + self.disc_data + ) + + elif self.std_ent_cls_code == "POS": + ret_string += self.id_number + self.ind_name + self.card_tr_typ_code_pos + + elif self.std_ent_cls_code == "SHR": + ret_string += self.card_exp_date + self.doc_ref_num + self.ind_card_acct_num + self.card_tr_typ_code_shr + + elif self.std_ent_cls_code == "RCK": + ret_string += self.chk_serial_num + self.ind_name + self.disc_data + + elif self.std_ent_cls_code == "WEB": + ret_string += self.id_number + self.ind_name + self.pmt_type_code + + ret_string += self.add_rec_ind + self.trace_num return ret_string @@ -706,46 +761,70 @@ def calc_check_digit(self): class AddendaRecord(Ach): - record_type_code = '7' - addenda_type_code = '05' + record_type_code = "7" + addenda_type_code = "05" alpha_numeric_fields = [ - 'trans_desc', 'net_id_code', 'term_id_code', - 'trans_serial_code', 'terminal_loc', 'terminal_city', - 'terminal_state', 'ref_info_1', 'ref_info_2', 'pmt_rel_info', - 'auth_card_exp' + "trans_desc", + "net_id_code", + "term_id_code", + "trans_serial_code", + "terminal_loc", + "terminal_city", + "terminal_state", + "ref_info_1", + "ref_info_2", + "pmt_rel_info", + "auth_card_exp", ] numeric_fields = [ - 'trans_date', 'trans_time', 'trace_num', - 'ent_det_seq_num', 'add_seq_num' + "trans_date", + "trans_time", + "trace_num", + "ent_det_seq_num", + "add_seq_num", ] 
field_lengths = { - 'trans_desc': 7, - 'net_id_code': 3, - 'term_id_code': 6, - 'trans_serial_code': 6, - 'terminal_loc': 27, - 'terminal_city': 15, - 'terminal_state': 2, - 'ref_info_1': 7, - 'ref_info_2': 3, - 'pmt_rel_info': 80, - 'trans_date': 4, - 'trans_time': 6, - 'trace_num': 15, - 'ent_det_seq_num': 7, - 'auth_card_exp': 6, - 'add_seq_num': 4, + "trans_desc": 7, + "net_id_code": 3, + "term_id_code": 6, + "trans_serial_code": 6, + "terminal_loc": 27, + "terminal_city": 15, + "terminal_state": 2, + "ref_info_1": 7, + "ref_info_2": 3, + "pmt_rel_info": 80, + "trans_date": 4, + "trans_time": 6, + "trace_num": 15, + "ent_det_seq_num": 7, + "auth_card_exp": 6, + "add_seq_num": 4, } - def __init__(self, std_ent_cls_code='PPD', trans_desc='', net_id_code='', - term_id_code='', ref_info_1='', ref_info_2='', - trans_serial_code='', trans_date='', trans_time='', - terminal_loc='', terminal_city='', terminal_state='', - trace_num='', auth_card_exp='', add_seq_num='', - ent_det_seq_num='', pmt_rel_info=''): + def __init__( + self, + std_ent_cls_code="PPD", + trans_desc="", + net_id_code="", + term_id_code="", + ref_info_1="", + ref_info_2="", + trans_serial_code="", + trans_date="", + trans_time="", + terminal_loc="", + terminal_city="", + terminal_state="", + trace_num="", + auth_card_exp="", + add_seq_num="", + ent_det_seq_num="", + pmt_rel_info="", + ): """ Initializes and validates values in entry addenda rows """ @@ -756,10 +835,10 @@ def __init__(self, std_ent_cls_code='PPD', trans_desc='', net_id_code='', for key in fields: - if key == 'self': + if key == "self": continue - if fields[key] != '': + if fields[key] != "": self.__setattr__(key, fields[key]) elif key in self.numeric_fields: @@ -771,57 +850,52 @@ def __init__(self, std_ent_cls_code='PPD', trans_desc='', net_id_code='', def __setattr__(self, name, value): if name in self.alpha_numeric_fields: - value = self.validate_alpha_numeric_field( - value, self.field_lengths[name] - ) + value = self.validate_alpha_numeric_field(value, self.field_lengths[name]) elif name in self.numeric_fields: - value = self.validate_numeric_field( - value, self.field_lengths[name] - ) - elif name == 'std_ent_cls_code': + value = self.validate_numeric_field(value, self.field_lengths[name]) + elif name == "std_ent_cls_code": pass else: - raise AchError( - "%s not in numeric or alpha numeric fields" % value - ) + raise AchError("%s not in numeric or alpha numeric fields" % value) super(AddendaRecord, self).__setattr__(name, value) def get_row(self): - ret_string = '' - - ret_string += self.record_type_code +\ - self.addenda_type_code - - if self.std_ent_cls_code == 'MTE': - ret_string += self.trans_desc +\ - self.net_id_code +\ - self.term_id_code +\ - self.trans_serial_code +\ - self.trans_date +\ - self.trans_time +\ - self.terminal_loc +\ - self.terminal_city +\ - self.terminal_state +\ - self.trace_num - - elif self.std_ent_cls_code in ['POS', 'SHR']: - ret_string += self.ref_info_1 +\ - self.ref_info_2 +\ - self.term_id_code +\ - self.trans_serial_code +\ - self.trans_date +\ - self.auth_card_exp +\ - self.terminal_loc +\ - self.terminal_city +\ - self.terminal_state +\ - self.trace_num + ret_string = "" + + ret_string += self.record_type_code + self.addenda_type_code + + if self.std_ent_cls_code == "MTE": + ret_string += ( + self.trans_desc + + self.net_id_code + + self.term_id_code + + self.trans_serial_code + + self.trans_date + + self.trans_time + + self.terminal_loc + + self.terminal_city + + self.terminal_state + + self.trace_num + ) + + elif 
self.std_ent_cls_code in ["POS", "SHR"]: + ret_string += ( + self.ref_info_1 + + self.ref_info_2 + + self.term_id_code + + self.trans_serial_code + + self.trans_date + + self.auth_card_exp + + self.terminal_loc + + self.terminal_city + + self.terminal_state + + self.trace_num + ) else: - ret_string += self.pmt_rel_info +\ - self.add_seq_num +\ - self.ent_det_seq_num + ret_string += self.pmt_rel_info + self.add_seq_num + self.ent_det_seq_num return ret_string diff --git a/ach/parser.py b/ach/parser.py index 1ae28f9..155aaaf 100644 --- a/ach/parser.py +++ b/ach/parser.py @@ -8,348 +8,348 @@ class Parser(object): Parser for ACH files """ - FILE_HEADER = '1' - FILE_CONTROL = '9' - BATCH_HEADER = '5' - BATCH_CONTROL = '8' - ENTRY_DETAIL = '6' - ADDENDA_RECORD = '7' + FILE_HEADER = "1" + FILE_CONTROL = "9" + BATCH_HEADER = "5" + BATCH_CONTROL = "8" + ENTRY_DETAIL = "6" + ADDENDA_RECORD = "7" FILE_HEADER_DEF = [ { - 'field': 'record_type_code', - 'pos': 0, - 'len': 1, + "field": "record_type_code", + "pos": 0, + "len": 1, }, { - 'field': 'priority_code', - 'pos': 1, - 'len': 2, + "field": "priority_code", + "pos": 1, + "len": 2, }, { - 'field': 'immediate_dest', - 'pos': 3, - 'len': 10, + "field": "immediate_dest", + "pos": 3, + "len": 10, }, { - 'field': 'immediate_org', - 'pos': 13, - 'len': 10, + "field": "immediate_org", + "pos": 13, + "len": 10, }, { - 'field': 'file_crt_date', - 'pos': 23, - 'len': 6, + "field": "file_crt_date", + "pos": 23, + "len": 6, }, { - 'field': 'file_crt_time', - 'pos': 29, - 'len': 4, + "field": "file_crt_time", + "pos": 29, + "len": 4, }, { - 'field': 'file_id_mod', - 'pos': 33, - 'len': 1, + "field": "file_id_mod", + "pos": 33, + "len": 1, }, { - 'field': 'record_size', - 'pos': 34, - 'len': 3, + "field": "record_size", + "pos": 34, + "len": 3, }, { - 'field': 'blk_factor', - 'pos': 37, - 'len': 2, + "field": "blk_factor", + "pos": 37, + "len": 2, }, { - 'field': 'format_code', - 'pos': 39, - 'len': 1, + "field": "format_code", + "pos": 39, + "len": 1, }, { - 'field': 'im_dest_name ', - 'pos': 40, - 'len': 23, + "field": "im_dest_name ", + "pos": 40, + "len": 23, }, { - 'field': 'im_orgn_name ', - 'pos': 63, - 'len': 23, + "field": "im_orgn_name ", + "pos": 63, + "len": 23, }, { - 'field': 'reference_code', - 'pos': 86, - 'len': 8, - } + "field": "reference_code", + "pos": 86, + "len": 8, + }, ] FILE_CONTROL_DEF = [ { - 'field': 'record_type_code', - 'pos': 0, - 'len': 1, + "field": "record_type_code", + "pos": 0, + "len": 1, }, { - 'field': 'batch_count', - 'pos': 1, - 'len': 6, + "field": "batch_count", + "pos": 1, + "len": 6, }, { - 'field': 'block_count', - 'pos': 7, - 'len': 6, + "field": "block_count", + "pos": 7, + "len": 6, }, { - 'field': 'entadd_count', - 'pos': 13, - 'len': 8, + "field": "entadd_count", + "pos": 13, + "len": 8, }, { - 'field': 'entry_hash', - 'pos': 21, - 'len': 10, + "field": "entry_hash", + "pos": 21, + "len": 10, }, { - 'field': 'debit_amount', - 'pos': 31, - 'len': 12, + "field": "debit_amount", + "pos": 31, + "len": 12, }, { - 'field': 'credit_amount', - 'pos': 43, - 'len': 12, + "field": "credit_amount", + "pos": 43, + "len": 12, }, { - 'field': 'reserved', - 'pos': 55, - 'len': 39, + "field": "reserved", + "pos": 55, + "len": 39, }, ] BATCH_HEADER_DEF = [ { - 'field': 'record_type_code', - 'pos': 0, - 'len': 1, + "field": "record_type_code", + "pos": 0, + "len": 1, }, { - 'field': 'serv_cls_code', - 'pos': 1, - 'len': 3, + "field": "serv_cls_code", + "pos": 1, + "len": 3, }, { - 'field': 'company_name', - 'pos': 4, - 'len': 16, 
+ "field": "company_name", + "pos": 4, + "len": 16, }, { - 'field': 'cmpy_dis_data', - 'pos': 20, - 'len': 20, + "field": "cmpy_dis_data", + "pos": 20, + "len": 20, }, { - 'field': 'company_id', - 'pos': 40, - 'len': 10, + "field": "company_id", + "pos": 40, + "len": 10, }, { - 'field': 'std_ent_cls_code', - 'pos': 50, - 'len': 3, + "field": "std_ent_cls_code", + "pos": 50, + "len": 3, }, { - 'field': 'entry_desc', - 'pos': 53, - 'len': 10, + "field": "entry_desc", + "pos": 53, + "len": 10, }, { - 'field': 'desc_date', - 'pos': 63, - 'len': 6, + "field": "desc_date", + "pos": 63, + "len": 6, }, { - 'field': 'eff_ent_date', - 'pos': 69, - 'len': 6, + "field": "eff_ent_date", + "pos": 69, + "len": 6, }, { - 'field': 'settlement_date', - 'pos': 75, - 'len': 3, + "field": "settlement_date", + "pos": 75, + "len": 3, }, { - 'field': 'orig_stat_code', - 'pos': 78, - 'len': 1, + "field": "orig_stat_code", + "pos": 78, + "len": 1, }, { - 'field': 'orig_dfi_id', - 'pos': 79, - 'len': 8, + "field": "orig_dfi_id", + "pos": 79, + "len": 8, }, { - 'field': 'batch_id', - 'pos': 87, - 'len': 7, + "field": "batch_id", + "pos": 87, + "len": 7, }, ] BATCH_CONTROL_DEF = [ { - 'field': 'record_type_code', - 'pos': 0, - 'len': 1, + "field": "record_type_code", + "pos": 0, + "len": 1, }, { - 'field': 'serv_cls_code', - 'pos': 1, - 'len': 3, + "field": "serv_cls_code", + "pos": 1, + "len": 3, }, { - 'field': 'entadd_count', - 'pos': 4, - 'len': 6, + "field": "entadd_count", + "pos": 4, + "len": 6, }, { - 'field': 'entry_hash', - 'pos': 10, - 'len': 10, + "field": "entry_hash", + "pos": 10, + "len": 10, }, { - 'field': 'debit_amount', - 'pos': 20, - 'len': 12, + "field": "debit_amount", + "pos": 20, + "len": 12, }, { - 'field': 'credit_amount', - 'pos': 32, - 'len': 12, + "field": "credit_amount", + "pos": 32, + "len": 12, }, { - 'field': 'company_id', - 'pos': 44, - 'len': 10, + "field": "company_id", + "pos": 44, + "len": 10, }, { - 'field': 'mesg_auth_code', - 'pos': 54, - 'len': 19, + "field": "mesg_auth_code", + "pos": 54, + "len": 19, }, { - 'field': 'reserved', - 'pos': 73, - 'len': 6, + "field": "reserved", + "pos": 73, + "len": 6, }, { - 'field': 'orig_dfi_id', - 'pos': 79, - 'len': 8, + "field": "orig_dfi_id", + "pos": 79, + "len": 8, }, { - 'field': 'orig_dfi_id', - 'pos': 87, - 'len': 7, + "field": "orig_dfi_id", + "pos": 87, + "len": 7, }, ] ENTRY_DETAIL_DEF = [ { - 'field': 'record_type_code', - 'pos': 0, - 'len': 1, + "field": "record_type_code", + "pos": 0, + "len": 1, }, { - 'field': 'transaction_code', - 'pos': 1, - 'len': 2, + "field": "transaction_code", + "pos": 1, + "len": 2, }, { - 'field': 'recv_dfi_id', - 'pos': 3, - 'len': 8, + "field": "recv_dfi_id", + "pos": 3, + "len": 8, }, { - 'field': 'check_digit', - 'pos': 11, - 'len': 1, + "field": "check_digit", + "pos": 11, + "len": 1, }, { - 'field': 'dfi_acnt_num', - 'pos': 12, - 'len': 17, + "field": "dfi_acnt_num", + "pos": 12, + "len": 17, }, { - 'field': 'amount', - 'pos': 29, - 'len': 10, + "field": "amount", + "pos": 29, + "len": 10, }, { - 'field': 'ind_id', - 'pos': 39, - 'len': 15, + "field": "ind_id", + "pos": 39, + "len": 15, }, { - 'field': 'ind_name', - 'pos': 54, - 'len': 22, + "field": "ind_name", + "pos": 54, + "len": 22, }, { - 'field': 'disc_data', - 'pos': 76, - 'len': 2, + "field": "disc_data", + "pos": 76, + "len": 2, }, { - 'field': 'add_rec_ind', - 'pos': 78, - 'len': 1, + "field": "add_rec_ind", + "pos": 78, + "len": 1, }, { - 'field': 'trace_num', - 'pos': 79, - 'len': 15, + "field": "trace_num", + "pos": 79, + "len": 
15, }, ] ADDENDA_RECORD_DEF = [ { - 'field': 'record_type_code', - 'pos': 0, - 'len': 1, + "field": "record_type_code", + "pos": 0, + "len": 1, }, { - 'field': 'addenda_type_code', - 'pos': 1, - 'len': 2, + "field": "addenda_type_code", + "pos": 1, + "len": 2, }, { - 'field': 'pmt_rel_info', - 'pos': 3, - 'len': 80, + "field": "pmt_rel_info", + "pos": 3, + "len": 80, }, { - 'field': 'add_seq_num', - 'pos': 83, - 'len': 4, + "field": "add_seq_num", + "pos": 83, + "len": 4, }, { - 'field': 'ent_det_seq_num', - 'pos': 87, - 'len': 7, + "field": "ent_det_seq_num", + "pos": 87, + "len": 7, }, ] record_type_codes = { - '1': 'file_header', - '9': 'file_control', - '5': 'batch_header', - '8': 'batch_control', - '6': 'entry_detail', - '7': 'addenda_record', + "1": "file_header", + "9": "file_control", + "5": "batch_header", + "8": "batch_control", + "6": "entry_detail", + "7": "addenda_record", } def __init__(self, ach_file): self.ach_file = ach_file - self.ach_lines = ach_file.split('\n') + self.ach_lines = ach_file.split("\n") self.ach_data = {} self.__parse_file() @@ -363,58 +363,49 @@ def as_dict(self): def as_csv(self): data = self.as_dict() buffer = StringIO() - file_header = [f['field'] for f in self.FILE_HEADER_DEF] - batch_header = [f['field'] for f in self.BATCH_HEADER_DEF] - entry_header = [f['field'] for f in self.ENTRY_DETAIL_DEF] - addenda_header = [f['field'] for f in self.ADDENDA_RECORD_DEF] - batch_control = [f['field'] for f in self.BATCH_CONTROL_DEF] - file_control = [f['field'] for f in self.FILE_CONTROL_DEF] + file_header = [f["field"] for f in self.FILE_HEADER_DEF] + batch_header = [f["field"] for f in self.BATCH_HEADER_DEF] + entry_header = [f["field"] for f in self.ENTRY_DETAIL_DEF] + addenda_header = [f["field"] for f in self.ADDENDA_RECORD_DEF] + batch_control = [f["field"] for f in self.BATCH_CONTROL_DEF] + file_control = [f["field"] for f in self.FILE_CONTROL_DEF] writer = csv.DictWriter(buffer, fieldnames=file_header) writer.writeheader() - writer.writerow(data['file_header']) + writer.writerow(data["file_header"]) writer.writerow({}) - for batch in data['batches']: + for batch in data["batches"]: writer.fieldnames = batch_header writer.writeheader() - writer.writerow(batch['batch_header']) + writer.writerow(batch["batch_header"]) max_addenda = 0 - for entry in batch['entries']: - max_addenda = max(max_addenda, len(entry['addenda'])) + for entry in batch["entries"]: + max_addenda = max(max_addenda, len(entry["addenda"])) writer.writerow({}) if max_addenda <= 1: - writer.fieldnames = entry_header + [ - 'a_{}'.format(h) for h in addenda_header - ] + writer.fieldnames = entry_header + ["a_{}".format(h) for h in addenda_header] else: writer.fieldnames = entry_header + [ - 'a_{}_{}'.format(h, i) for h in addenda_header - for i in range(max_addenda) + "a_{}_{}".format(h, i) for h in addenda_header for i in range(max_addenda) ] writer.writeheader() - for entry in batch['entries']: - detail = dict(entry['entry_detail']) - if entry['addenda']: - if len(entry['addenda']) == 1: - detail.update({ - 'a_{}'.format(k): v - for k, v in entry['addenda'][0].items() - }) + for entry in batch["entries"]: + detail = dict(entry["entry_detail"]) + if entry["addenda"]: + if len(entry["addenda"]) == 1: + detail.update({"a_{}".format(k): v for k, v in entry["addenda"][0].items()}) else: - for i, addenda in enumerate(entry['addenda']): - detail.update({ - 'a_{}_{}'.format(k, i): v - for k, v in addenda.items() - }) + for i, addenda in enumerate(entry["addenda"]): + 
detail.update({"a_{}_{}".format(k, i): v for k, v in addenda.items()}) writer.writerow(detail) writer.writerow({}) writer.fieldnames = batch_control writer.writeheader() - writer.writerow(batch['batch_control']) + writer.writerow(batch["batch_control"]) writer.writerow({}) writer.fieldnames = file_control writer.writeheader() - writer.writerow(data['file_control']) + writer.writerow(data["file_control"]) return buffer.getvalue() def __parse_file(self): @@ -429,8 +420,8 @@ def __parse_line(self, line, record_type): record_data = {} for rule in defintions: - value = line[rule['pos']:rule['pos'] + rule['len']] - record_data[rule['field']] = value + value = line[rule["pos"] : rule["pos"] + rule["len"]] + record_data[rule["field"]] = value return record_data @@ -438,18 +429,14 @@ def __parse_file_header(self): for line in self.ach_lines: if line: if line[0] == self.FILE_HEADER: - self.ach_data['file_header'] = self.__parse_line( - line, 'FILE_HEADER_DEF' - ) + self.ach_data["file_header"] = self.__parse_line(line, "FILE_HEADER_DEF") break def __parse_file_control(self): for line in self.ach_lines: if line: if line[0] == self.FILE_CONTROL: - self.ach_data['file_control'] = self.__parse_line( - line, 'FILE_CONTROL_DEF' - ) + self.ach_data["file_control"] = self.__parse_line(line, "FILE_CONTROL_DEF") break def __get_batch_info(self): @@ -458,53 +445,46 @@ def __get_batch_info(self): for line_num, line in enumerate(self.ach_lines): if line: if line[0] == self.BATCH_HEADER: - batches.append({ - 'batch_header_line': line_num, - }) + batches.append( + { + "batch_header_line": line_num, + } + ) if line[0] == self.BATCH_CONTROL: - batches[len(batches) - 1]['batch_control_line'] = line_num + batches[len(batches) - 1]["batch_control_line"] = line_num return batches def __parse_batches(self, batch_info): - self.ach_data['batches'] = [] + self.ach_data["batches"] = [] for batch in batch_info: - self.ach_data['batches'].append({ - 'batch_header': self.__parse_line( - self.ach_lines[batch['batch_header_line']], - 'BATCH_HEADER_DEF' - ), - 'batch_control': self.__parse_line( - self.ach_lines[batch['batch_control_line']], - 'BATCH_CONTROL_DEF' - ), - 'entries': [], - }) + self.ach_data["batches"].append( + { + "batch_header": self.__parse_line(self.ach_lines[batch["batch_header_line"]], "BATCH_HEADER_DEF"), + "batch_control": self.__parse_line( + self.ach_lines[batch["batch_control_line"]], "BATCH_CONTROL_DEF" + ), + "entries": [], + } + ) - start = batch['batch_header_line'] + 1 - stop = batch['batch_control_line'] + start = batch["batch_header_line"] + 1 + stop = batch["batch_control_line"] for line_num in range(start, stop): if self.ach_lines[line_num]: - cur_batch = len(self.ach_data['batches']) - 1 - cur_entry = len( - self.ach_data['batches'][cur_batch]['entries'] - ) - 1 + cur_batch = len(self.ach_data["batches"]) - 1 + cur_entry = len(self.ach_data["batches"][cur_batch]["entries"]) - 1 if self.ach_lines[line_num][0] == self.ENTRY_DETAIL: - self.ach_data['batches'][cur_batch]['entries'].append({ - 'entry_detail': self.__parse_line( - self.ach_lines[line_num], - 'ENTRY_DETAIL_DEF' - ), - 'addenda': [] - }) + self.ach_data["batches"][cur_batch]["entries"].append( + { + "entry_detail": self.__parse_line(self.ach_lines[line_num], "ENTRY_DETAIL_DEF"), + "addenda": [], + } + ) if self.ach_lines[line_num][0] == self.ADDENDA_RECORD: - self.ach_data['batches'][cur_batch]['entries'][ - cur_entry - ]['addenda'].append( - self.__parse_line( - self.ach_lines[line_num], 'ADDENDA_RECORD_DEF' - ) + 
self.ach_data["batches"][cur_batch]["entries"][cur_entry]["addenda"].append( + self.__parse_line(self.ach_lines[line_num], "ADDENDA_RECORD_DEF") ) diff --git a/example.py b/example.py index 5c20e2d..1f530a6 100644 --- a/example.py +++ b/example.py @@ -1,69 +1,69 @@ from ach.builder import AchFile settings = { - 'immediate_dest' : '123456780', - 'immediate_org' : '123456780', - 'immediate_dest_name' : 'YOUR BANK', - 'immediate_org_name' : 'YOUR COMPANY', - 'company_id' : '1234567890', #tax number + "immediate_dest": "123456780", + "immediate_org": "123456780", + "immediate_dest_name": "YOUR BANK", + "immediate_org_name": "YOUR COMPANY", + "company_id": "1234567890", # tax number } -ach_file = AchFile('A', settings) #file Id mod +ach_file = AchFile("A", settings) # file Id mod entries = [ { - 'type' : '22', # type of - 'routing_number' : '12345678', - 'account_number' : '11232132', - 'amount' : '10.00', - 'name' : 'Alice Wanderdust', - 'addenda' : [ + "type": "22", # type of + "routing_number": "12345678", + "account_number": "11232132", + "amount": "10.00", + "name": "Alice Wanderdust", + "addenda": [ { - 'payment_related_info': 'Here is some additional information', + "payment_related_info": "Here is some additional information", }, ], }, { - 'type' : '27', - 'routing_number' : '12345678', - 'account_number' : '234234234', - 'amount' : '150.00', - 'name' : 'Billy Holiday', + "type": "27", + "routing_number": "12345678", + "account_number": "234234234", + "amount": "150.00", + "name": "Billy Holiday", }, { - 'type' : '22', - 'routing_number' : '123232318', - 'account_number' : '123123123', - 'amount' : '12.13', - 'name' : 'Rachel Welch', + "type": "22", + "routing_number": "123232318", + "account_number": "123123123", + "amount": "12.13", + "name": "Rachel Welch", }, ] -ach_file.add_batch('PPD', entries, credits=True, debits=True) +ach_file.add_batch("PPD", entries, credits=True, debits=True) print ach_file.render_to_string() # add_batch will skip failures and return them -ach_file = AchFile('B', settings) #file Id mod +ach_file = AchFile("B", settings) # file Id mod entries = [ { - 'type' : '27', - 'routing_number' : '********', # invalid - 'account_number' : '********', # invalid - 'amount' : '150.00', - 'name' : 'Billy Holiday', + "type": "27", + "routing_number": "********", # invalid + "account_number": "********", # invalid + "amount": "150.00", + "name": "Billy Holiday", }, { - 'type' : '22', - 'routing_number' : '123232318', - 'account_number' : '123123123', - 'amount' : '12.13', - 'name' : 'Rachel Welch', + "type": "22", + "routing_number": "123232318", + "account_number": "123123123", + "amount": "12.13", + "name": "Rachel Welch", }, ] -print ach_file.add_batch('PPD', entries, credits=True, debits=True) +print ach_file.add_batch("PPD", entries, credits=True, debits=True) print ach_file.render_to_string() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..d4c84cc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,18 @@ +[tool.black] +line-length = 120 +py36 = false +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | logs + | _build + | buck-out + | build + | dist +)/ +''' diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..e69de29 diff --git a/setup.py b/setup.py index e92b6f4..801df03 100644 --- a/setup.py +++ b/setup.py @@ -1,15 +1,15 @@ from setuptools import setup setup( - name='carta-ach', - author='Carta, Inc.', - author_email='james.uejio@carta.com', - version='0.4.6', + 
name="carta-ach", + author="Carta, Inc.", + author_email="james.uejio@carta.com", + version="0.4.7", packages=[ - 'ach', + "ach", ], - url='https://github.com/carta/python-ach', - license='MIT License', - description='Library to create and parse ACH files (NACHA)', - long_description=open('README.rst').read(), + url="https://github.com/carta/python-ach", + license="MIT License", + description="Library to create and parse ACH files (NACHA)", + long_description=open("README.rst").read(), ) diff --git a/tests/test_data_types.py b/tests/test_data_types.py index 3d04a40..b737b71 100644 --- a/tests/test_data_types.py +++ b/tests/test_data_types.py @@ -3,15 +3,12 @@ class TestDataTypes(object): - def setup(self): - ''' + """ We need to set up some data types - ''' + """ - self.header = dt.Header( - '123456789', '123456789', 'A', 'YOUR BANK', 'YOUR COMPANY' - ) + self.header = dt.Header("123456789", "123456789", "A", "YOUR BANK", "YOUR COMPANY") self.file_control = dt.FileControl(1, 1, 0, 213123123, 12300, 12300) self.batch_header = dt.BatchHeader() self.batch_control = dt.BatchControl() @@ -19,9 +16,9 @@ def setup(self): self.addenda_record = dt.AddendaRecord() def test_line_width(self): - ''' + """ Test each record to make sure they are 94 characters wide - ''' + """ nt.assert_equals(len(self.header.get_row()), 94) nt.assert_equals(len(self.file_control.get_row()), 94) nt.assert_equals(len(self.batch_header.get_row()), 94) @@ -30,32 +27,23 @@ def test_line_width(self): nt.assert_equals(len(self.addenda_record.get_row()), 94) def test_invalid_property_header(self): - ''' + """ We make sure that properties that are not define in "numeric_fields" or "alpha_numeric_fields" cannot be defined as object properties. - ''' - nt.assert_raises(dt.AchError, setattr, self.header, - 'test_property', 'testtesttest') - nt.assert_raises(dt.AchError, setattr, self.file_control, - 'test_property', 'testtesttest') - nt.assert_raises(dt.AchError, setattr, self.batch_header, - 'test_property', 'testtesttest') - nt.assert_raises(dt.AchError, setattr, self.batch_control, - 'test_property', 'testtesttest') - nt.assert_raises(dt.AchError, setattr, self.entry_detail, - 'test_property', 'testtesttest') - nt.assert_raises(dt.AchError, setattr, self.addenda_record, - 'test_property', 'testtesttest') + """ + nt.assert_raises(dt.AchError, setattr, self.header, "test_property", "testtesttest") + nt.assert_raises(dt.AchError, setattr, self.file_control, "test_property", "testtesttest") + nt.assert_raises(dt.AchError, setattr, self.batch_header, "test_property", "testtesttest") + nt.assert_raises(dt.AchError, setattr, self.batch_control, "test_property", "testtesttest") + nt.assert_raises(dt.AchError, setattr, self.entry_detail, "test_property", "testtesttest") + nt.assert_raises(dt.AchError, setattr, self.addenda_record, "test_property", "testtesttest") def test_check_digit(self): - ''' + """ Ensure our check digit is being calculate appropriately on entry detail records - ''' - self.entry_detail.recv_dfi_id = '11100002' + """ + self.entry_detail.recv_dfi_id = "11100002" self.entry_detail.calc_check_digit() - nt.assert_equal( - self.entry_detail.recv_dfi_id + self.entry_detail.check_digit, - '111000025' - ) + nt.assert_equal(self.entry_detail.recv_dfi_id + self.entry_detail.check_digit, "111000025") diff --git a/tests/test_line_endings.py b/tests/test_line_endings.py index 6c8d075..926febd 100644 --- a/tests/test_line_endings.py +++ b/tests/test_line_endings.py @@ -2,54 +2,55 @@ from ach.builder import AchFile + class 
TestLineEndings(object): def setup(self): self.settings = { - 'immediate_dest' : '123456780', - 'immediate_org' : '123456780', - 'immediate_dest_name' : 'YOUR BANK', - 'immediate_org_name' : 'YOUR COMPANY', - 'company_id' : '1234567890', #tax number + "immediate_dest": "123456780", + "immediate_org": "123456780", + "immediate_dest_name": "YOUR BANK", + "immediate_org_name": "YOUR COMPANY", + "company_id": "1234567890", # tax number } - self.ach_file = AchFile('A', self.settings) #file Id mod + self.ach_file = AchFile("A", self.settings) # file Id mod self.entries = [ { - 'type' : '22', # type of - 'routing_number' : '12345678', - 'account_number' : '11232132', - 'amount' : '10.00', - 'name' : 'Alice Wanderdust', - 'addenda' : [ + "type": "22", # type of + "routing_number": "12345678", + "account_number": "11232132", + "amount": "10.00", + "name": "Alice Wanderdust", + "addenda": [ { - 'payment_related_info': 'Here is some additional information', + "payment_related_info": "Here is some additional information", }, ], }, { - 'type' : '27', - 'routing_number' : '12345678', - 'account_number' : '234234234', - 'amount' : '150.00', - 'name' : 'Billy Holiday', + "type": "27", + "routing_number": "12345678", + "account_number": "234234234", + "amount": "150.00", + "name": "Billy Holiday", }, { - 'type' : '22', - 'routing_number' : '123232318', - 'account_number' : '123123123', - 'amount' : '12.13', - 'name' : 'Rachel Welch', + "type": "22", + "routing_number": "123232318", + "account_number": "123123123", + "amount": "12.13", + "name": "Rachel Welch", }, ] - self.ach_file.add_batch('PPD', self.entries, credits=True, debits=True) + self.ach_file.add_batch("PPD", self.entries, credits=True, debits=True) def test_normal(self): ach_output = self.ach_file.render_to_string() - rows = ach_output.split('\n') + rows = ach_output.split("\n") nt.assert_equals(len(rows), 10) for row in rows: nt.assert_equals(len(row), 94) @@ -57,7 +58,7 @@ def test_normal(self): def test_force_crlf(self): ach_output = self.ach_file.render_to_string(force_crlf=True) - rows = ach_output.split('\r\n') + rows = ach_output.split("\r\n") nt.assert_equals(len(rows), 10) for row in rows: nt.assert_equals(len(row), 94) From 4a4adf60132a929f24ebf6940629d3d764d6ee88 Mon Sep 17 00:00:00 2001 From: James Uejio Date: Wed, 7 Apr 2021 11:46:20 -0400 Subject: [PATCH 4/8] Update Parser to work for IAT transactions --- ach/parser.py | 206 ++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 191 insertions(+), 15 deletions(-) diff --git a/ach/parser.py b/ach/parser.py index 155aaaf..15a663b 100644 --- a/ach/parser.py +++ b/ach/parser.py @@ -3,7 +3,168 @@ from io import StringIO -class Parser(object): +class SECCode: + IAT = "IAT" + IAT_NOC = "COR" + + +class IATParserConfig: + ENTRY_DETAIL_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "transaction_code", "pos": 1, "len": 2}, + {"field": "recv_dfi_id", "pos": 3, "len": 9}, + {"field": "num_addenda", "pos": 12, "len": 4}, + {"field": "reserved_one", "pos": 16, "len": 13}, + {"field": "amount", "pos": 29, "len": 10}, + {"field": "dfi_acnt_num", "pos": 39, "len": 35}, + {"field": "reserved_two", "pos": 74, "len": 2}, + {"field": "ofac_screen_ind", "pos": 76, "len": 1}, + {"field": "ofac_screen_ind_two", "pos": 77, "len": 1}, + {"field": "add_rec_ind", "pos": 78, "len": 1}, + {"field": "trace_number", "pos": 79, "len": 15}, + ] + + BATCH_HEADER_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "serv_cls_code", "pos": 1, "len": 3}, + 
{"field": "iat_indicator", "pos": 4, "len": 16}, + {"field": "foreign_exchange_indicator", "pos": 20, "len": 2}, + {"field": "foreign_exchange_ref_indicator", "pos": 22, "len": 1}, + {"field": "iso_country_code", "pos": 38, "len": 2}, + {"field": "orig_id", "pos": 40, "len": 10}, + {"field": "std_ent_cls_code", "pos": 50, "len": 3}, + {"field": "entry_desc", "pos": 53, "len": 10}, + {"field": "iso_orig_currency_code", "pos": 63, "len": 3}, + {"field": "iso_dest_currency_code", "pos": 66, "len": 3}, + {"field": "eff_ent_date", "pos": 69, "len": 6}, + {"field": "settlement_date", "pos": 75, "len": 3}, + {"field": "orig_stat_code", "pos": 78, "len": 1}, + {"field": "orig_dfi_id", "pos": 79, "len": 8}, + {"field": "batch_id", "pos": 87, "len": 7}, + ] + + ADDENDA_710_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "transaction_type_code", "pos": 3, "len": 3}, + {"field": "foreign_amount", "pos": 6, "len": 18}, + {"field": "foreign_trace_number", "pos": 24, "len": 22}, + {"field": "rec_name", "pos": 46, "len": 35}, + {"field": "reserved", "pos": 81, "len": 6}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_711_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "orig_name", "pos": 3, "len": 35}, + {"field": "orig_street_address", "pos": 38, "len": 35}, + {"field": "reserved", "pos": 73, "len": 14}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_712_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "orig_city_state", "pos": 3, "len": 35}, + {"field": "orig_country_postal", "pos": 38, "len": 35}, + {"field": "reserved", "pos": 73, "len": 14}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_713_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "orig_dfi_name", "pos": 3, "len": 35}, + {"field": "orig_dfi_id_qualifier", "pos": 38, "len": 2}, + {"field": "orig_dfi_id", "pos": 40, "len": 34}, + {"field": "orig_dfi_branch_cc", "pos": 74, "len": 3}, + {"field": "reserved", "pos": 77, "len": 10}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_714_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "rec_dfi_name", "pos": 3, "len": 35}, + {"field": "rec_dfi_id_qualifier", "pos": 38, "len": 2}, + {"field": "rec_dfi_id", "pos": 40, "len": 34}, + {"field": "rec_dfi_branch_cc", "pos": 74, "len": 3}, + {"field": "reserved", "pos": 77, "len": 10}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_715_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "rec_id", "pos": 3, "len": 15}, + {"field": "rec_street_address", "pos": 18, "len": 35}, + {"field": "reserved", "pos": 53, "len": 34}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_716_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "rec_city_state", "pos": 3, "len": 35}, + {"field": "rec_country_postal", "pos": 38, "len": 35}, + {"field": "reserved", "pos": 73, "len": 14}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_717_RECORD_DEF = [ + 
{ + "field": "record_type_code", + "pos": 0, + "len": 1, + }, + { + "field": "addenda_type_code", + "pos": 1, + "len": 2, + }, + { + "field": "pmt_rel_info", + "pos": 3, + "len": 80, + }, + { + "field": "add_seq_num", + "pos": 83, + "len": 4, + }, + { + "field": "ent_det_seq_num", + "pos": 87, + "len": 7, + }, + ] + ADDENDA_718_RECORD_DEF = [ + {"field": "record_type_code", "pos": 0, "len": 1}, + {"field": "addenda_type_code", "pos": 1, "len": 2}, + {"field": "correspondent_bank_name", "pos": 3, "len": 35}, + {"field": "correspondent_bank_id_qual", "pos": 38, "len": 2}, + {"field": "correspondent_bank_id", "pos": 40, "len": 34}, + {"field": "correspondent_bank_country", "pos": 74, "len": 3}, + {"field": "reserved", "pos": 77, "len": 6}, + {"field": "add_seq_num", "pos": 83, "len": 4}, + {"field": "ent_det_seq_num", "pos": 87, "len": 7}, + ] + + ADDENDA_TYPE_TO_RECORD_DEF = { + "710": ADDENDA_710_RECORD_DEF, + "711": ADDENDA_711_RECORD_DEF, + "712": ADDENDA_712_RECORD_DEF, + "713": ADDENDA_713_RECORD_DEF, + "714": ADDENDA_714_RECORD_DEF, + "715": ADDENDA_715_RECORD_DEF, + "716": ADDENDA_716_RECORD_DEF, + "717": ADDENDA_717_RECORD_DEF, + "718": ADDENDA_718_RECORD_DEF, + } + + +class Parser(IATParserConfig): """ Parser for ACH files """ @@ -415,11 +576,10 @@ def __parse_file(self): batch_info = self.__get_batch_info() self.__parse_batches(batch_info) - def __parse_line(self, line, record_type): - defintions = getattr(self, record_type) + def __parse_line(self, line, definitions): record_data = {} - for rule in defintions: + for rule in definitions: value = line[rule["pos"] : rule["pos"] + rule["len"]] record_data[rule["field"]] = value @@ -429,14 +589,14 @@ def __parse_file_header(self): for line in self.ach_lines: if line: if line[0] == self.FILE_HEADER: - self.ach_data["file_header"] = self.__parse_line(line, "FILE_HEADER_DEF") + self.ach_data["file_header"] = self.__parse_line(line, self.FILE_HEADER_DEF) break def __parse_file_control(self): for line in self.ach_lines: if line: if line[0] == self.FILE_CONTROL: - self.ach_data["file_control"] = self.__parse_line(line, "FILE_CONTROL_DEF") + self.ach_data["file_control"] = self.__parse_line(line, self.FILE_CONTROL_DEF) break def __get_batch_info(self): @@ -459,11 +619,19 @@ def __parse_batches(self, batch_info): self.ach_data["batches"] = [] for batch in batch_info: + batch_header = self.__parse_line(self.ach_lines[batch["batch_header_line"]], self.BATCH_HEADER_DEF) + + sec_code = batch_header["std_ent_cls_code"] + if sec_code == SECCode.IAT: + batch_header = self.__parse_line( + self.ach_lines[batch["batch_header_line"]], IATParserConfig.BATCH_HEADER_DEF + ) + self.ach_data["batches"].append( { - "batch_header": self.__parse_line(self.ach_lines[batch["batch_header_line"]], "BATCH_HEADER_DEF"), + "batch_header": batch_header, "batch_control": self.__parse_line( - self.ach_lines[batch["batch_control_line"]], "BATCH_CONTROL_DEF" + self.ach_lines[batch["batch_control_line"]], self.BATCH_CONTROL_DEF ), "entries": [], } @@ -478,13 +646,21 @@ def __parse_batches(self, batch_info): cur_entry = len(self.ach_data["batches"][cur_batch]["entries"]) - 1 if self.ach_lines[line_num][0] == self.ENTRY_DETAIL: + if sec_code == SECCode.IAT: + entry_detail = self.__parse_line(self.ach_lines[line_num], IATParserConfig.ENTRY_DETAIL_DEF) + else: + entry_detail = self.__parse_line(self.ach_lines[line_num], self.ENTRY_DETAIL_DEF) + self.ach_data["batches"][cur_batch]["entries"].append( - { - "entry_detail": self.__parse_line(self.ach_lines[line_num], 
"ENTRY_DETAIL_DEF"), - "addenda": [], - } + {"entry_detail": entry_detail, "addenda": []} ) if self.ach_lines[line_num][0] == self.ADDENDA_RECORD: - self.ach_data["batches"][cur_batch]["entries"][cur_entry]["addenda"].append( - self.__parse_line(self.ach_lines[line_num], "ADDENDA_RECORD_DEF") - ) + if sec_code == SECCode.IAT: + addenda_type = self.ach_lines[line_num][:3] + definitions = IATParserConfig.ADDENDA_TYPE_TO_RECORD_DEF.get(addenda_type) + if definitions: + addenda_detail = self.__parse_line(self.ach_lines[line_num], definitions) + else: + addenda_detail = self.__parse_line(self.ach_lines[line_num], self.ADDENDA_RECORD_DEF) + + self.ach_data["batches"][cur_batch]["entries"][cur_entry]["addenda"].append(addenda_detail) From fe837417cc02936967ef20642d7f04a3a518ab23 Mon Sep 17 00:00:00 2001 From: Terraform Date: Fri, 12 Dec 2025 21:13:50 +0000 Subject: [PATCH 5/8] chore: update terraformed-claude.yml workflow [terraform-managed] --- .github/workflows/terraformed-claude.yml | 34 ++++++++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 .github/workflows/terraformed-claude.yml diff --git a/.github/workflows/terraformed-claude.yml b/.github/workflows/terraformed-claude.yml new file mode 100644 index 0000000..5dff294 --- /dev/null +++ b/.github/workflows/terraformed-claude.yml @@ -0,0 +1,34 @@ +# This workflow is managed by terraform-github +# Source: https://github.com/carta/terraform-github/blob/main/carta/tf/workflow_files.tf +# DO NOT EDIT THIS FILE DIRECTLY - changes will be overwritten by Terraform +# +# To modify workflow behavior, update the reusable workflow at: +# https://github.com/carta/.github/blob/main/.github/workflows/claude-code-assistant.yml + +name: Claude Assistant + +on: + pull_request: + types: [opened, synchronize, ready_for_review] + issue_comment: + types: [created] + pull_request_review_comment: + types: [created] + pull_request_review: + types: [submitted] + issues: + types: [opened, assigned] + workflow_dispatch: + inputs: + pr_number: + description: 'PR number to review (optional - auto-detects from branch if not provided)' + required: false + type: number + +jobs: + claude: + uses: carta/.github/.github/workflows/claude-code-assistant.yml@main + # secrets: inherit is GitHub's recommended pattern for reusable workflows + # The reusable workflow can only access secrets that exist in this repo + # See: https://docs.github.com/en/actions/using-workflows/reusing-workflows#using-inputs-and-secrets-in-a-reusable-workflow + secrets: inherit From 293e7e86d46f2c37e64fd4c823c31c7bb96b8f6f Mon Sep 17 00:00:00 2001 From: Terraform Date: Thu, 5 Feb 2026 22:16:59 +0000 Subject: [PATCH 6/8] chore: update terraformed-claude.yml workflow [terraform-managed] --- .github/workflows/terraformed-claude.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/terraformed-claude.yml b/.github/workflows/terraformed-claude.yml index 5dff294..981132f 100644 --- a/.github/workflows/terraformed-claude.yml +++ b/.github/workflows/terraformed-claude.yml @@ -9,7 +9,7 @@ name: Claude Assistant on: pull_request: - types: [opened, synchronize, ready_for_review] + types: [opened, synchronize, ready_for_review, review_requested] issue_comment: types: [created] pull_request_review_comment: From 01e5c22b39c8a6844afd6d8b9ba530a9d46b6701 Mon Sep 17 00:00:00 2001 From: Terraform Date: Wed, 11 Feb 2026 21:03:53 +0000 Subject: [PATCH 7/8] chore: update terraformed-claude.yml workflow [terraform-managed] --- .github/workflows/terraformed-claude.yml | 6 
++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/terraformed-claude.yml b/.github/workflows/terraformed-claude.yml index 981132f..c2d9303 100644 --- a/.github/workflows/terraformed-claude.yml +++ b/.github/workflows/terraformed-claude.yml @@ -14,8 +14,10 @@ on: types: [created] pull_request_review_comment: types: [created] - pull_request_review: - types: [submitted] + # NOTE: pull_request_review is intentionally excluded. Claude's own review + # submissions re-trigger this workflow, creating "skipped" ghost runs that + # overwrite the real check status. @claude in review comments is still + # supported via pull_request_review_comment above. issues: types: [opened, assigned] workflow_dispatch: From e04d5b59f43fc95b01a33b39d101e211f3160f5c Mon Sep 17 00:00:00 2001 From: Terraform Date: Wed, 11 Feb 2026 22:32:55 +0000 Subject: [PATCH 8/8] chore: update terraformed-claude.yml workflow [terraform-managed] --- .github/workflows/terraformed-claude.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/terraformed-claude.yml b/.github/workflows/terraformed-claude.yml index c2d9303..8c20435 100644 --- a/.github/workflows/terraformed-claude.yml +++ b/.github/workflows/terraformed-claude.yml @@ -9,7 +9,7 @@ name: Claude Assistant on: pull_request: - types: [opened, synchronize, ready_for_review, review_requested] + types: [opened, synchronize, ready_for_review] issue_comment: types: [created] pull_request_review_comment:
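
A minimal usage sketch for the IAT-aware Parser added in PATCH 4/8, assuming a NACHA file on disk; the file path, the printed fields, and the surrounding script are illustrative assumptions, while the Parser constructor, the as_dict() layout, and the IAT field names follow the definitions shown in the diff above.

.. code:: python

    from ach.parser import Parser

    # Hypothetical input path; Parser takes the raw NACHA text as a string.
    with open("sample_iat.ach") as fh:
        parsed = Parser(fh.read()).as_dict()

    for batch in parsed["batches"]:
        # IAT batches are detected via the SEC code in the batch header,
        # which switches the parser to the IAT-specific field definitions.
        if batch["batch_header"]["std_ent_cls_code"] == "IAT":
            for entry in batch["entries"]:
                detail = entry["entry_detail"]
                print(detail["trace_number"], detail["amount"], len(entry["addenda"]))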