rem (stringlengths 1-322k) | add (stringlengths 0-2.05M) | context (stringlengths 4-228k) | meta (stringlengths 156-215) |
---|---|---|---|
amount = cur_obj.compute(cr, uid, currency_id, company_currency, amount*tax_sign, context={'date': date_invoice or time.strftime('%Y-%m-%d')}, round=False) | amount = cur_obj.compute(cr, uid, currency_id, company_currency, amount*factor, context={'date': date_invoice or time.strftime('%Y-%m-%d')}, round=False) | def amount_change(self, cr, uid, ids, amount, currency_id=False, company_id=False, date_invoice=False): cur_obj = self.pool.get('res.currency') company_obj = self.pool.get('res.company') company_currency = False tax_sign = 1 if ids: tax_amount = self.read(cr, uid, ids[0], ['tax_amount'])['tax_amount'] if tax_amount < 0: tax_sign = -1 elif tax_amount == 0: tax_sign = 0 if company_id: company_currency = company_obj.read(cr, uid, [company_id], ['currency_id'])[0]['currency_id'][0] if currency_id and company_currency: amount = cur_obj.compute(cr, uid, currency_id, company_currency, amount*tax_sign, context={'date': date_invoice or time.strftime('%Y-%m-%d')}, round=False) return {'value': {'tax_amount': amount}} | f76c611d865dccbabe5295564296e8672a5cace7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f76c611d865dccbabe5295564296e8672a5cace7/invoice.py |
'name': fields.char('Name', size=64, required=True), | 'name': fields.char('Name', size=64, required=True, states={'confirm': [('readonly', True)]}), | def _currency(self, cursor, user, ids, name, args, context=None): res = {} res_currency_obj = self.pool.get('res.currency') res_users_obj = self.pool.get('res.users') default_currency = res_users_obj.browse(cursor, user, user, context=context).company_id.currency_id for statement in self.browse(cursor, user, ids, context=context): currency = statement.journal_id.currency if not currency: currency = default_currency res[statement.id] = currency.id currency_names = {} for currency_id, currency_name in res_currency_obj.name_get(cursor, user, [x for x in res.values()], context=context): currency_names[currency_id] = currency_name for statement_id in res.keys(): currency_id = res[statement_id] res[statement_id] = (currency_id, currency_names[currency_id]) return res | dcce6d645d1174bec97fb3e563a914fb7a01dfa7 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/dcce6d645d1174bec97fb3e563a914fb7a01dfa7/account_bank_statement.py |
'email': cal_data.value.replace('MAILTO:',''), | 'email': cal_data.value.lower().replace('mailto:',''), | def parse_ics(self, cr, uid, child, cal_children=None, context=None): """ parse calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ | 912caa49c4337d0be9e0a4eadcbd44904d06171b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/912caa49c4337d0be9e0a4eadcbd44904d06171b/calendar.py |
if valid_moves: for move in self.browse(cr, uid, valid_moves): if move.name =='/': new_name = False journal = move.journal_id if invoice and invoice.internal_number: new_name = invoice.internal_number | if not valid_moves: raise osv.except_osv(_('Integrity Error !'), _('You cannot validate a non-balanced entry !\nMake sure you have configured Payment Term properly !\nIt should contain atleast one Payment Term Line with type "Balance" !')) for move in self.browse(cr, uid, valid_moves): if move.name =='/': new_name = False journal = move.journal_id if invoice and invoice.internal_number: new_name = invoice.internal_number else: if journal.sequence_id: c = {'fiscalyear_id': move.period_id.fiscalyear_id.id} new_name = self.pool.get('ir.sequence').get_id(cr, uid, journal.sequence_id.id, context=c) | def post(self, cr, uid, ids, context=None): invoice = context.get('invoice', False) valid_moves = self.validate(cr, uid, ids, context) if valid_moves: for move in self.browse(cr, uid, valid_moves): if move.name =='/': new_name = False journal = move.journal_id | 417c87c92886c45e52c7c6f093fb17871bde13cd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/417c87c92886c45e52c7c6f093fb17871bde13cd/account.py |
if journal.sequence_id: c = {'fiscalyear_id': move.period_id.fiscalyear_id.id} new_name = self.pool.get('ir.sequence').get_id(cr, uid, journal.sequence_id.id, context=c) else: raise osv.except_osv(_('Error'), _('No sequence defined in the journal !')) if new_name: self.write(cr, uid, [move.id], {'name':new_name}) cr.execute('UPDATE account_move '\ 'SET state=%s '\ 'WHERE id IN %s', ('posted', tuple(valid_moves),)) else: raise osv.except_osv(_('Integrity Error !'), _('You can not validate a non-balanced entry !\nMake sure you have configured Payment Term properly !\nIt should contain atleast one Payment Term Line with type "Balance" !')) | raise osv.except_osv(_('Error'), _('No sequence defined on the journal !')) if new_name: self.write(cr, uid, [move.id], {'name':new_name}) cr.execute('UPDATE account_move '\ 'SET state=%s '\ 'WHERE id IN %s', ('posted', tuple(valid_moves),)) | def post(self, cr, uid, ids, context=None): invoice = context.get('invoice', False) valid_moves = self.validate(cr, uid, ids, context) if valid_moves: for move in self.browse(cr, uid, valid_moves): if move.name =='/': new_name = False journal = move.journal_id | 417c87c92886c45e52c7c6f093fb17871bde13cd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/417c87c92886c45e52c7c6f093fb17871bde13cd/account.py |
amount = currency_obj.compute(cursor, user, line.currency_id.id, | amount = currency_obj.compute(cr, user, line.currency_id.id, | def populate_statement(self, cr, uid, ids, context=None): | 082fce194baa704029958fd92440beb160f6db69 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/082fce194baa704029958fd92440beb160f6db69/account_statement_from_invoice.py |
amount = currency_obj.compute(cursor, user, line.invoice.currency_id.id, | amount = currency_obj.compute(cr, user, line.invoice.currency_id.id, | def populate_statement(self, cr, uid, ids, context=None): | 082fce194baa704029958fd92440beb160f6db69 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/082fce194baa704029958fd92440beb160f6db69/account_statement_from_invoice.py |
rec = anal_def_obj.account_get(cr, uid, line.product_id.id, sale_line.order_id.partner_id.id, uid, time.strftime('%Y-%m-%d'), context) | rec = anal_def_obj.account_get(cr, uid, line.product_id.id, sale_line.order_id.partner_id.id, sale_line.order_id.user_id.id, time.strftime('%Y-%m-%d'), context) | def invoice_line_create(self, cr, uid, ids, context=None): create_ids = super(sale_order_line,self).invoice_line_create(cr, uid, ids, context) if not ids: return create_ids sale_line = self.browse(cr, uid, ids[0], context) inv_line_obj = self.pool.get('account.invoice.line') anal_def_obj = self.pool.get('account.analytic.default') for line in inv_line_obj.browse(cr, uid, create_ids, context): rec = anal_def_obj.account_get(cr, uid, line.product_id.id, sale_line.order_id.partner_id.id, uid, time.strftime('%Y-%m-%d'), context) | 9161b4c4c70ee3696770fb80d9fd06e15f823dbf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/9161b4c4c70ee3696770fb80d9fd06e15f823dbf/account_analytic_default.py |
elif method in ['about', 'timezone_get', 'get_server_environment', 'login_message', 'get_stats' ]: | elif method in ['about', 'timezone_get', 'get_server_environment', 'login_message','get_stats', 'check_connectivity']: | def dispatch(self, method, auth, params): logger = netsvc.Logger() if method in [ 'ir_set','ir_del', 'ir_get' ]: return self.common_dispatch(method,auth,params) if method == 'login': # At this old dispatcher, we do NOT update the auth proxy res = security.login(params[0], params[1], params[2]) msg = res and 'successful login' or 'bad login or password' # TODO log the client ip address.. logger.notifyChannel("web-service", netsvc.LOG_INFO, "%s from '%s' using database '%s'" % (msg, params[1], params[0].lower())) return res or False elif method == 'logout': if auth: auth.logout(params[1]) logger.notifyChannel("web-service", netsvc.LOG_INFO,'Logout %s from database %s'%(login,db)) return True elif method in ['about', 'timezone_get', 'get_server_environment', 'login_message', 'get_stats' ]: pass elif method in ['get_available_updates', 'get_migration_scripts', 'set_loglevel']: passwd = params[0] params = params[1:] security.check_super(passwd) else: raise Exception("Method not found: %s" % method) fn = getattr(self, 'exp_'+method) return fn(*params) | c95bd0a29e732edea53e77525b950356e924ceda /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c95bd0a29e732edea53e77525b950356e924ceda/web_services.py |
def check_connectivity(self): | def exp_check_connectivity(self): | def check_connectivity(self): return bool(sql_db.db_connect('template1')) | c95bd0a29e732edea53e77525b950356e924ceda /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c95bd0a29e732edea53e77525b950356e924ceda/web_services.py |
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) | self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']]) | def create(self, cursor, user, vals, context=None): if not context: context = {} ids = super(ir_translation, self).create(cursor, user, vals, context=context) for trans_obj in self.read(cursor, user, [ids], ['name','type','res_id','src','lang'], context=context): self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src']) self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) return ids | d67b6eccda0e5e7089cacd9a87e96452e1742151 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d67b6eccda0e5e7089cacd9a87e96452e1742151/ir_translation.py |
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) | self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']]) | def write(self, cursor, user, ids, vals, context=None): if not context: context = {} result = super(ir_translation, self).write(cursor, user, ids, vals, context=context) for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context): self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src']) self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) return result | d67b6eccda0e5e7089cacd9a87e96452e1742151 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d67b6eccda0e5e7089cacd9a87e96452e1742151/ir_translation.py |
self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) | self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], [trans_obj['res_id']]) | def unlink(self, cursor, user, ids, context=None): if not context: context = {} for trans_obj in self.read(cursor, user, ids, ['name','type','res_id','src','lang'], context=context): self._get_source.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], trans_obj['lang'], source=trans_obj['src']) self._get_ids.clear_cache(cursor.dbname, user, trans_obj['name'], trans_obj['type'], context.get('lang','en_US'), [trans_obj['res_id']]) result = super(ir_translation, self).unlink(cursor, user, ids, context=context) return result | d67b6eccda0e5e7089cacd9a87e96452e1742151 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d67b6eccda0e5e7089cacd9a87e96452e1742151/ir_translation.py |
db = pooler.get_db_only(db_name) cr = db.cursor() cr.execute("select id from ir_module_module where name = 'document' and state='installed' ") res=cr.fetchone() if res and len(res): self.db_name_list.append(db_name) cr.close() | cr = None try: db = pooler.get_db_only(db_name) cr = db.cursor() cr.execute("SELECT id FROM ir_module_module WHERE name = 'document' AND state='installed' ") res=cr.fetchone() if res and len(res): self.db_name_list.append(db_name) except Exception, e: self.parent.log_error("Exception in db list: %s" % e) finally: if cr: cr.close() | def db_list(self): s = netsvc.ExportService.getService('db') result = s.exp_list() self.db_name_list=[] for db_name in result: db = pooler.get_db_only(db_name) cr = db.cursor() cr.execute("select id from ir_module_module where name = 'document' and state='installed' ") res=cr.fetchone() if res and len(res): self.db_name_list.append(db_name) cr.close() return self.db_name_list | 0be3a0757ca31d9abe8f78f86c3606200d546941 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0be3a0757ca31d9abe8f78f86c3606200d546941/dav_fs.py |
class res_users(osv.osv): _inherit = 'res.users' def _get_group(self, cr, uid, context=None): result = super(res_users, self)._get_group(cr, uid, context=context) dataobj = self.pool.get('ir.model.data') try: dummy,group_id = dataobj.get_object_reference(cr, 1, 'product', 'group_product_manager') result.append(group_id) except ValueError: pass return result _defaults = { 'groups_id': _get_group, } res_users() | def price_get(self, cr, uid, supplier_ids, product_id, product_qty=1, context=None): """ Calculate price from supplier pricelist. @param supplier_ids: Ids of res.partner object. @param product_id: Id of product. @param product_qty: specify quantity to purchase. """ if type(supplier_ids) in (int,long,): supplier_ids = [supplier_ids] res = {} product_pool = self.pool.get('product.product') partner_pool = self.pool.get('res.partner') pricelist_pool = self.pool.get('product.pricelist') currency_pool = self.pool.get('res.currency') currency_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id for supplier in partner_pool.browse(cr, uid, supplier_ids, context=context): # Compute price from standard price of product price = product_pool.price_get(cr, uid, [product_id], 'standard_price')[product_id] | 1d5a2275c8146a0e629ce067fe88a67f16d07e00 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/1d5a2275c8146a0e629ce067fe88a67f16d07e00/product.py |
if hours.get(task.id, 0.0) > res[task.id]['total_hours']: res[task.id]['progress'] = round(100 - (max(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 100)) % 100, 2) | if task.state =='done': res[task.id]['progress'] = 100.0 | def _hours_get(self, cr, uid, ids, field_names, args, context=None): res = {} cr.execute("SELECT task_id, COALESCE(SUM(hours),0) FROM project_task_work WHERE task_id =ANY(%s) GROUP BY task_id",(ids,)) hours = dict(cr.fetchall()) for task in self.browse(cr, uid, ids, context=context): res[task.id] = {'effective_hours': hours.get(task.id, 0.0), 'total_hours': task.remaining_hours + hours.get(task.id, 0.0)} if (task.remaining_hours + hours.get(task.id, 0.0)): if hours.get(task.id, 0.0) > res[task.id]['total_hours']: res[task.id]['progress'] = round(100 - (max(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 100)) % 100, 2) else: res[task.id]['progress'] = round(min(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 100),2) else: res[task.id]['progress'] = 0.0 if task.state in ('done','cancel'): res[task.id]['progress'] = 100.0 res[task.id]['delay_hours'] = res[task.id]['total_hours'] - task.planned_hours return res | 482c260053ce958a0c30bbe1404d5d7d8d6b88b6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/482c260053ce958a0c30bbe1404d5d7d8d6b88b6/project.py |
res[task.id]['progress'] = round(min(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 100),2) | res[task.id]['progress'] = round(min(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 99.99),2) | def _hours_get(self, cr, uid, ids, field_names, args, context=None): res = {} cr.execute("SELECT task_id, COALESCE(SUM(hours),0) FROM project_task_work WHERE task_id =ANY(%s) GROUP BY task_id",(ids,)) hours = dict(cr.fetchall()) for task in self.browse(cr, uid, ids, context=context): res[task.id] = {'effective_hours': hours.get(task.id, 0.0), 'total_hours': task.remaining_hours + hours.get(task.id, 0.0)} if (task.remaining_hours + hours.get(task.id, 0.0)): if hours.get(task.id, 0.0) > res[task.id]['total_hours']: res[task.id]['progress'] = round(100 - (max(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 100)) % 100, 2) else: res[task.id]['progress'] = round(min(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 100),2) else: res[task.id]['progress'] = 0.0 if task.state in ('done','cancel'): res[task.id]['progress'] = 100.0 res[task.id]['delay_hours'] = res[task.id]['total_hours'] - task.planned_hours return res | 482c260053ce958a0c30bbe1404d5d7d8d6b88b6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/482c260053ce958a0c30bbe1404d5d7d8d6b88b6/project.py |
for root, _, names in os.walk(join('bin','addons')): files.append((root, [join(root, name) for name in names])) | os.chdir('bin') for (dp, dn, names) in os.walk('addons'): files.append((dp, map(lambda x: join('bin', dp, x), names))) os.chdir('..') | def data_files(): '''Build list of data files to be installed''' files = [] if os.name == 'nt': for root, _, names in os.walk(join('bin','addons')): files.append((root, [join(root, name) for name in names])) for root, _, names in os.walk('doc'): files.append((root, [join(root, name) for name in names])) for root, _, names in os.walk('pixmaps'): files.append((root, [join(root, name) for name in names])) files.append(('.', [join('bin', 'import_xml.rng'), join('bin', 'server.pkey'), join('bin', 'server.cert')])) else: man_directory = join('share', 'man') files.append((join(man_directory, 'man1'), ['man/openerp-server.1'])) files.append((join(man_directory, 'man5'), ['man/openerp_serverrc.5'])) doc_directory = join('share', 'doc', 'openerp-server-%s' % version) files.append((doc_directory, filter(isfile, glob.glob('doc/*')))) files.append((join(doc_directory, 'migrate', '3.3.0-3.4.0'), filter(isfile, glob.glob('doc/migrate/3.3.0-3.4.0/*')))) files.append((join(doc_directory, 'migrate', '3.4.0-4.0.0'), filter(isfile, glob.glob('doc/migrate/3.4.0-4.0.0/*')))) openerp_site_packages = join(get_python_lib(prefix=''), 'openerp-server') files.append((openerp_site_packages, [join('bin', 'import_xml.rng'), join('bin', 'server.pkey'), join('bin', 'server.cert')])) if sys.version_info[0:2] == (2,5): files.append((openerp_site_packages, [ join('python25-compat','BaseHTTPServer.py'), join('python25-compat','SimpleXMLRPCServer.py'), join('python25-compat','SocketServer.py')])) for addonname, add_path in find_addons(): addon_path = join(get_python_lib(prefix=''), 'openerp-server','addons', addonname) for root, dirs, innerfiles in os.walk(add_path): innerfiles = filter(lambda fil: os.path.splitext(fil)[1] not in ('.pyc', '.pyd', '.pyo'), innerfiles) if innerfiles: res = os.path.normpath(join(addon_path, root.replace(join(add_path), '.'))) files.extend(((res, map(lambda fil: join(root, fil), innerfiles)),)) return files | b4a68aa716a04c045203cd1c343cdc0fe7fb5dee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b4a68aa716a04c045203cd1c343cdc0fe7fb5dee/setup.py |
for root, _, names in os.walk('pixmaps'): files.append((root, [join(root, name) for name in names])) | def data_files(): '''Build list of data files to be installed''' files = [] if os.name == 'nt': for root, _, names in os.walk(join('bin','addons')): files.append((root, [join(root, name) for name in names])) for root, _, names in os.walk('doc'): files.append((root, [join(root, name) for name in names])) for root, _, names in os.walk('pixmaps'): files.append((root, [join(root, name) for name in names])) files.append(('.', [join('bin', 'import_xml.rng'), join('bin', 'server.pkey'), join('bin', 'server.cert')])) else: man_directory = join('share', 'man') files.append((join(man_directory, 'man1'), ['man/openerp-server.1'])) files.append((join(man_directory, 'man5'), ['man/openerp_serverrc.5'])) doc_directory = join('share', 'doc', 'openerp-server-%s' % version) files.append((doc_directory, filter(isfile, glob.glob('doc/*')))) files.append((join(doc_directory, 'migrate', '3.3.0-3.4.0'), filter(isfile, glob.glob('doc/migrate/3.3.0-3.4.0/*')))) files.append((join(doc_directory, 'migrate', '3.4.0-4.0.0'), filter(isfile, glob.glob('doc/migrate/3.4.0-4.0.0/*')))) openerp_site_packages = join(get_python_lib(prefix=''), 'openerp-server') files.append((openerp_site_packages, [join('bin', 'import_xml.rng'), join('bin', 'server.pkey'), join('bin', 'server.cert')])) if sys.version_info[0:2] == (2,5): files.append((openerp_site_packages, [ join('python25-compat','BaseHTTPServer.py'), join('python25-compat','SimpleXMLRPCServer.py'), join('python25-compat','SocketServer.py')])) for addonname, add_path in find_addons(): addon_path = join(get_python_lib(prefix=''), 'openerp-server','addons', addonname) for root, dirs, innerfiles in os.walk(add_path): innerfiles = filter(lambda fil: os.path.splitext(fil)[1] not in ('.pyc', '.pyd', '.pyo'), innerfiles) if innerfiles: res = os.path.normpath(join(addon_path, root.replace(join(add_path), '.'))) files.extend(((res, map(lambda fil: join(root, fil), innerfiles)),)) return files | b4a68aa716a04c045203cd1c343cdc0fe7fb5dee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b4a68aa716a04c045203cd1c343cdc0fe7fb5dee/setup.py |
'.'.join(['openerp-server'] + package.split('.')[1:]) for package in find_packages() | '.'.join(['openerp-server'] + package.split('.')[1:]) for package in find_packages() | def run(self): # create startup script start_script = "#!/bin/sh\ncd %s\nexec %s ./openerp-server.py $@\n"\ % (join(self.install_libbase, "openerp-server"), sys.executable) # write script f = open('openerp-server', 'w') f.write(start_script) f.close() install.run(self) | b4a68aa716a04c045203cd1c343cdc0fe7fb5dee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b4a68aa716a04c045203cd1c343cdc0fe7fb5dee/setup.py |
install_requires = ['lxml', 'mako', 'python-dateutil', 'psycopg2', 'pychart', 'pydot', 'pytz', 'reportlab', 'caldav', 'pyyaml', 'sqlalchemy', 'django', 'pywebdav' 'cx_Oracle', 'mysqldb', 'feedparser', 'bsddb3', 'egenix-mx-base'], | install_requires = [ 'lxml', 'mako', 'python-dateutil', 'psycopg2', 'pychart', 'pydot', 'pytz', 'reportlab', 'caldav', 'pyyaml', 'pywebdav' 'feedparser', 'egenix-mx-base' ], | def run(self): # create startup script start_script = "#!/bin/sh\ncd %s\nexec %s ./openerp-server.py $@\n"\ % (join(self.install_libbase, "openerp-server"), sys.executable) # write script f = open('openerp-server', 'w') f.write(start_script) f.close() install.run(self) | b4a68aa716a04c045203cd1c343cdc0fe7fb5dee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b4a68aa716a04c045203cd1c343cdc0fe7fb5dee/setup.py |
if has_py2exe: import pytz import zipfile assert (pytz.__file__.endswith('__init__.pyc') or pytz.__file__.endswith('__init__.py')), pytz.__file__ zoneinfo_dir = join(os.path.dirname(pytz.__file__), 'zoneinfo') disk_basedir = os.path.dirname(os.path.dirname(pytz.__file__)) zipfile_path = join(options['py2exe']['dist_dir'], 'library.zip') z = zipfile.ZipFile(zipfile_path, 'a') for absdir, directories, filenames in os.walk(zoneinfo_dir): assert absdir.startswith(disk_basedir), (absdir, disk_basedir) zip_dir = absdir[len(disk_basedir):] for f in filenames: z.write(join(absdir, f), join(zip_dir, f)) z.close() | def run(self): # create startup script start_script = "#!/bin/sh\ncd %s\nexec %s ./openerp-server.py $@\n"\ % (join(self.install_libbase, "openerp-server"), sys.executable) # write script f = open('openerp-server', 'w') f.write(start_script) f.close() install.run(self) | b4a68aa716a04c045203cd1c343cdc0fe7fb5dee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b4a68aa716a04c045203cd1c343cdc0fe7fb5dee/setup.py |
ids = map(lambda x:int(x), ids) | def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'): if not context: context = {} if not ids: return [] ids = map(lambda x:int(x), ids) if fields_to_read == None: fields_to_read = self._columns.keys() | be66abf49fba1d6ff979c8bd88896e120d624470 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/be66abf49fba1d6ff979c8bd88896e120d624470/orm.py |
event.add('dtend').value = ics_datetime(event_obj.date_deadline) | if not event_obj.date_deadline[10:]: short = True else: short = False event.add('dtend').value = ics_datetime(event_obj.date_deadline, short) | def ics_datetime(idate, short=False): if idate: if short: return date.fromtimestamp(time.mktime(time.strptime(idate, '%Y-%m-%d'))) else: return datetime.strptime(idate, '%Y-%m-%d %H:%M:%S') else: return False | 80300fc795964e715fd14c7f30f6cc3f7f94d825 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/80300fc795964e715fd14c7f30f6cc3f7f94d825/base_calendar.py |
msg['subject'] = ' '.join(map(lambda (x, y): unicode(x, y or 'ascii'), decode_header(msg_txt.get('Subject')))) | msg['subject'] = _decode_header(msg_txt.get('Subject')) | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
msg['from'] = msg_txt.get('From') | msg['from'] = _decode_header(msg_txt.get('From')) | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
msg['to'] = msg_txt.get('Delivered-To') | msg['to'] = _decode_header(msg_txt.get('Delivered-To')) | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
msg['cc'] = msg_txt.get('Cc') | msg['cc'] = _decode_header(msg_txt.get('Cc')) | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
msg['reply'] = msg_txt.get('Reply-To') | msg['reply'] = _decode_header(msg_txt.get('Reply-To')) | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
if part.get_content_subtype() == 'html': body = html2plaintext(content) elif part.get_content_subtype() == 'plain': body = content | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
attachents[filename] = part.get_payload(decode=True) | attachents[filename] = content else: if encoding: content = unicode(content, encoding) if part.get_content_subtype() == 'html': body = html2plaintext(content) elif part.get_content_subtype() == 'plain': body = content | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
body += part.get_payload(decode=True) | res = part.get_payload(decode=True) if encoding: res = res.decode(encoding).encode('utf-8') body += res | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
encoding = msg_txt.get_content_charset('utf-8') msg['body'] = msg['body'].decode(encoding).encode('utf-8') | def _process_email(self, cr, uid, server, message, context={}): context.update({ 'server_id':server.id }) history_pool = self.pool.get('mail.server.history') msg_txt = email.message_from_string(message) message_id = msg_txt.get('Message-ID', False) | a6a41d78b0f513b6416c98b45c3ec7f55309ff87 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6a41d78b0f513b6416c98b45c3ec7f55309ff87/fetchmail.py |
'where id in ('+','.join(['%s'] * len(sub_ids))+')', sub_ids) | 'where id in %s', (sub_ids,)) | def unlink(self, cr, uid, ids, context=None): """ Delete records with given ids | 265712731443dc97279848ec4c450378943db83b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/265712731443dc97279848ec4c450378943db83b/orm.py |
res = {}.fromkeys(ids, False) | if self._type in ('one2many', 'many2many'): res = {}.fromkeys(ids, []) else: res = {}.fromkeys(ids, False) | def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None): self._field_get2(cr, uid, obj, context) if not ids: return {} relation = obj._name res = {}.fromkeys(ids, False) | 051c07966bf1c5ea699ef9f5702dbd09216fc4ba /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/051c07966bf1c5ea699ef9f5702dbd09216fc4ba/fields.py |
else: | elif t_data: | def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None): self._field_get2(cr, uid, obj, context) if not ids: return {} relation = obj._name res = {}.fromkeys(ids, False) | 051c07966bf1c5ea699ef9f5702dbd09216fc4ba /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/051c07966bf1c5ea699ef9f5702dbd09216fc4ba/fields.py |
else: | elif t_data: | def _fnct_read(self, obj, cr, uid, ids, field_name, args, context=None): self._field_get2(cr, uid, obj, context) if not ids: return {} relation = obj._name res = {}.fromkeys(ids, False) | 051c07966bf1c5ea699ef9f5702dbd09216fc4ba /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/051c07966bf1c5ea699ef9f5702dbd09216fc4ba/fields.py |
val = property_obj.browse(cr, uid,prop_ids[0]).value | val = property_obj.browse(cr, uid,prop_ids[0]).value_reference | def create_returns2(self, cr, uid, ids, context): record_id = context.get('record_id', False) order_obj =self.pool.get('pos.order') line_obj = self.pool.get('pos.order.line') picking_obj = self.pool.get('stock.picking') stock_move_obj = self.pool.get('stock.move') property_obj= self.pool.get("ir.property") uom_obj =self. pool.get('product.uom') wf_service = netsvc.LocalService("workflow") #Todo :Need to clean the code if record_id: picking_ids = picking_obj.search(cr, uid, [('pos_order', 'in',[record_id]), ('state', '=', 'done')]) data=self.read(cr,uid,ids)[0] clone_list = [] date_cur=time.strftime('%Y-%m-%d') | b0929a59a28622f2306192e6588ec8628ffa2189 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b0929a59a28622f2306192e6588ec8628ffa2189/pos_return.py |
stock_dest_id = int(val.split(',')[1]) | stock_dest_id = val.id | def create_returns2(self, cr, uid, ids, context): record_id = context.get('record_id', False) order_obj =self.pool.get('pos.order') line_obj = self.pool.get('pos.order.line') picking_obj = self.pool.get('stock.picking') stock_move_obj = self.pool.get('stock.move') property_obj= self.pool.get("ir.property") uom_obj =self. pool.get('product.uom') wf_service = netsvc.LocalService("workflow") #Todo :Need to clean the code if record_id: picking_ids = picking_obj.search(cr, uid, [('pos_order', 'in',[record_id]), ('state', '=', 'done')]) data=self.read(cr,uid,ids)[0] clone_list = [] date_cur=time.strftime('%Y-%m-%d') | b0929a59a28622f2306192e6588ec8628ffa2189 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b0929a59a28622f2306192e6588ec8628ffa2189/pos_return.py |
val = property_obj.browse(cr, uid,prop_ids[0]).value | val = property_obj.browse(cr, uid,prop_ids[0]).value_reference | def select_product(self, cr, uid, ids, context): """ To get the product and quantity and add in order . @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return : Retrun the add product form again for adding more product """ if not context.get('record_id', False): super(add_product).select_product(cr,uid,ids) else: record_id=context.get('record_id', False) data = self.read(cr, uid, ids)[0] order_obj = self.pool.get('pos.order') lines_obj = self.pool.get('pos.order.line') picking_obj = self.pool.get('stock.picking') stock_move_obj = self.pool.get('stock.move') move_obj = self.pool.get('stock.move') property_obj= self.pool.get("ir.property") invoice_obj= self.pool.get('account.invoice') picking_ids = picking_obj.search(cr, uid, [('pos_order', 'in',[record_id]), ('state', '=', 'done')]) clone_list = [] date_cur=time.strftime('%Y-%m-%d') uom_obj = self.pool.get('product.uom') prod_obj=self.pool.get('product.product') wf_service = netsvc.LocalService("workflow") return_boj=self.pool.get('pos.return') order_obj.add_product(cr, uid, record_id,data['product_id'],data['quantity'], context=context) for order_id in order_obj.browse(cr, uid, [record_id], context=context): prod=data['product_id'] qty=data['quantity'] prop_ids = property_obj.search(cr, uid,[('name', '=', 'property_stock_customer')]) val = property_obj.browse(cr, uid,prop_ids[0]).value cr.execute("select s.id from stock_location s, stock_warehouse w where w.lot_stock_id=s.id and w.id= %d "%(order_id.shop_id.warehouse_id.id)) res=cr.fetchone() location_id=res and res[0] or None stock_dest_id = int(val.split(',')[1]) prod_id=prod_obj.browse(cr,uid,prod) new_picking=picking_obj.create(cr,uid,{ 'name':'%s (Added)' %order_id.name, 'move_lines':[], 'state':'draft', 'type':'out', 'date':date_cur, }) new_move=stock_move_obj.create(cr, uid,{ 'product_qty': qty, 'product_uos_qty': uom_obj._compute_qty(cr, uid,prod_id.uom_id.id, qty, prod_id.uom_id.id), 'picking_id':new_picking, 'product_uom':prod_id.uom_id.id, 'location_id':location_id, 'product_id':prod_id.id, 'location_dest_id':stock_dest_id, 'name':'%s (return)' %order_id.name, 'date':date_cur, 'date_planned':date_cur,}) wf_service.trg_validate(uid, 'stock.picking',new_picking,'button_confirm', cr) picking_obj.force_assign(cr, uid, [new_picking], context) order_obj.write(cr,uid,record_id,{'last_out_picking':new_picking}) return { 'name': _('Add Product'), 'view_type': 'form', 'view_mode': 'form', 'res_model': 'pos.add.product', 'view_id': False, 'target':'new', 'context':context, 'views': False, 'type': 'ir.actions.act_window', } | b0929a59a28622f2306192e6588ec8628ffa2189 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b0929a59a28622f2306192e6588ec8628ffa2189/pos_return.py |
stock_dest_id = int(val.split(',')[1]) | stock_dest_id = val.id | def select_product(self, cr, uid, ids, context): """ To get the product and quantity and add in order . @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return : Retrun the add product form again for adding more product """ if not context.get('record_id', False): super(add_product).select_product(cr,uid,ids) else: record_id=context.get('record_id', False) data = self.read(cr, uid, ids)[0] order_obj = self.pool.get('pos.order') lines_obj = self.pool.get('pos.order.line') picking_obj = self.pool.get('stock.picking') stock_move_obj = self.pool.get('stock.move') move_obj = self.pool.get('stock.move') property_obj= self.pool.get("ir.property") invoice_obj= self.pool.get('account.invoice') picking_ids = picking_obj.search(cr, uid, [('pos_order', 'in',[record_id]), ('state', '=', 'done')]) clone_list = [] date_cur=time.strftime('%Y-%m-%d') uom_obj = self.pool.get('product.uom') prod_obj=self.pool.get('product.product') wf_service = netsvc.LocalService("workflow") return_boj=self.pool.get('pos.return') order_obj.add_product(cr, uid, record_id,data['product_id'],data['quantity'], context=context) for order_id in order_obj.browse(cr, uid, [record_id], context=context): prod=data['product_id'] qty=data['quantity'] prop_ids = property_obj.search(cr, uid,[('name', '=', 'property_stock_customer')]) val = property_obj.browse(cr, uid,prop_ids[0]).value cr.execute("select s.id from stock_location s, stock_warehouse w where w.lot_stock_id=s.id and w.id= %d "%(order_id.shop_id.warehouse_id.id)) res=cr.fetchone() location_id=res and res[0] or None stock_dest_id = int(val.split(',')[1]) prod_id=prod_obj.browse(cr,uid,prod) new_picking=picking_obj.create(cr,uid,{ 'name':'%s (Added)' %order_id.name, 'move_lines':[], 'state':'draft', 'type':'out', 'date':date_cur, }) new_move=stock_move_obj.create(cr, uid,{ 'product_qty': qty, 'product_uos_qty': uom_obj._compute_qty(cr, uid,prod_id.uom_id.id, qty, prod_id.uom_id.id), 'picking_id':new_picking, 'product_uom':prod_id.uom_id.id, 'location_id':location_id, 'product_id':prod_id.id, 'location_dest_id':stock_dest_id, 'name':'%s (return)' %order_id.name, 'date':date_cur, 'date_planned':date_cur,}) wf_service.trg_validate(uid, 'stock.picking',new_picking,'button_confirm', cr) picking_obj.force_assign(cr, uid, [new_picking], context) order_obj.write(cr,uid,record_id,{'last_out_picking':new_picking}) return { 'name': _('Add Product'), 'view_type': 'form', 'view_mode': 'form', 'res_model': 'pos.add.product', 'view_id': False, 'target':'new', 'context':context, 'views': False, 'type': 'ir.actions.act_window', } | b0929a59a28622f2306192e6588ec8628ffa2189 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b0929a59a28622f2306192e6588ec8628ffa2189/pos_return.py |
val = property_obj.browse(cr, uid,prop_ids[0]).value | val = property_obj.browse(cr, uid,prop_ids[0]).value_reference | def close_action(self, cr, uid, ids, context): | b0929a59a28622f2306192e6588ec8628ffa2189 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b0929a59a28622f2306192e6588ec8628ffa2189/pos_return.py |
stock_dest_id = int(val.split(',')[1]) | stock_dest_id = val.id | def close_action(self, cr, uid, ids, context): | b0929a59a28622f2306192e6588ec8628ffa2189 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b0929a59a28622f2306192e6588ec8628ffa2189/pos_return.py |
""" Fetch records as objects allowing to use dot notation to browse fields and relations | """Fetch records as objects allowing to use dot notation to browse fields and relations | def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None): """ Fetch records as objects allowing to use dot notation to browse fields and relations | 1fd49747fb358c5f7acfc5a39ddee79946b7d657 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/1fd49747fb358c5f7acfc5a39ddee79946b7d657/orm.py |
self.logger.log(logging.ERROR, e) | self.logger.exception(e) | def process(self, yaml_string): """ Processes a Yaml string. Custom tags are interpreted by 'process_' instance methods. """ is_preceded_by_comment = False for node in yaml.load(yaml_string): is_preceded_by_comment = self._log(node, is_preceded_by_comment) try: self._process_node(node) except YamlImportException, e: self.logger.log(logging.ERROR, e) except Exception, e: self.logger.log(logging.ERROR, e) raise e | 44f6ef941a26abbdc5bc9528390c2d03a9e7e1dc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/44f6ef941a26abbdc5bc9528390c2d03a9e7e1dc/yaml_import.py |
self.logger.log(logging.ERROR, e) | self.logger.exception(e) | def process(self, yaml_string): """ Processes a Yaml string. Custom tags are interpreted by 'process_' instance methods. """ is_preceded_by_comment = False for node in yaml.load(yaml_string): is_preceded_by_comment = self._log(node, is_preceded_by_comment) try: self._process_node(node) except YamlImportException, e: self.logger.log(logging.ERROR, e) except Exception, e: self.logger.log(logging.ERROR, e) raise e | 44f6ef941a26abbdc5bc9528390c2d03a9e7e1dc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/44f6ef941a26abbdc5bc9528390c2d03a9e7e1dc/yaml_import.py |
if not (line.account_id.parent_left > parent_left and line.account_id.parent_left < parent_right): | if not (line.account_id.parent_left >= parent_left and line.account_id.parent_left <= parent_right): | def _get_chart_account(cursor, user, account): if account.parent_id: chart_account = _get_chart_account(cursor, user, account.parent_id) else: chart_account = account return chart_account | 51ed8f94a0265b08445d50265ae606d04398b851 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/51ed8f94a0265b08445d50265ae606d04398b851/account.py |
'date_start': fields.datetime('Start Date', help="Starting Date of the phase"), 'date_end': fields.datetime('End Date', help="Ending Date of the phase"), | 'date_start': fields.date('Start Date', help="Starting Date of the phase"), 'date_end': fields.date('End Date', help="Ending Date of the phase"), | def _get_default_uom_id(self, cr, uid): model_data_obj = self.pool.get('ir.model.data') model_data_id = model_data_obj._get_id(cr, uid, 'product', 'uom_hour') return model_data_obj.read(cr, uid, [model_data_id], ['res_id'])[0]['res_id'] | c4afa3c7715ea1df11a663d02ed0914b0fa4b3cf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c4afa3c7715ea1df11a663d02ed0914b0fa4b3cf/project_long_term.py |
'product_uom': lambda self,cr,uid,c: self.pool.get('product.uom').search(cr, uid, [('name', '=', 'day')], context=c)[0] | 'product_uom': lambda self,cr,uid,c: self.pool.get('product.uom').search(cr, uid, [('name', '=', _('Day'))], context=c)[0] | def _get_default_uom_id(self, cr, uid): model_data_obj = self.pool.get('ir.model.data') model_data_id = model_data_obj._get_id(cr, uid, 'product', 'uom_hour') return model_data_obj.read(cr, uid, [model_data_id], ['res_id'])[0]['res_id'] | c4afa3c7715ea1df11a663d02ed0914b0fa4b3cf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c4afa3c7715ea1df11a663d02ed0914b0fa4b3cf/project_long_term.py |
next_date += relativedelta(day=line.days2) | next_date += relativedelta(day=31) | def compute(self, cr, uid, id, value, date_ref=False, context={}): if not date_ref: date_ref = datetime.now().strftime('%Y-%m-%d') pt = self.browse(cr, uid, id, context) amount = value result = [] for line in pt.line_ids: prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account') if line.value == 'fixed': amt = round(line.value_amount, prec) elif line.value == 'procent': amt = round(value * line.value_amount, prec) elif line.value == 'balance': amt = round(amount, prec) if amt: next_date = datetime.strptime(date_ref, '%Y-%m-%d') + relativedelta(days=line.days) if line.days2 < 0: next_date += relativedelta(day=line.days2) if line.days2 > 0: next_date += relativedelta(day=line.days2, months=1) result.append( (next_date.strftime('%Y-%m-%d'), amt) ) amount -= amt return result | d62cd51810b9ba0567ca13eb8fc98dc804d1444a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d62cd51810b9ba0567ca13eb8fc98dc804d1444a/account.py |
if obj._columns[field2]._type in ('many2one', 'one2one'): | if field2 in obj._columns.keys() and obj._columns[field2]._type in ('many2one', 'one2one'): | fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2] | a6d5a61714f560094b747e4a43f952b1aff89c3e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a6d5a61714f560094b747e4a43f952b1aff89c3e/orm.py |
pick = pick_obj.browse(cr, uid, record_id) | pick = pick_obj.browse(cr, uid, record_id, context=context) | def default_get(self, cr, uid, fields, context): """ To get default values for the object. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param fields: List of fields for which we want default values @param context: A standard dictionary @return: A dictionary which of fields with values. """ res = super(stock_split_move_line, self).default_get(cr, uid, fields, context=context) record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) for m in [line for line in pick.move_lines]: res['move%s'%(m.id)] = m.product_qty return res | f25bedb4d0206e51ed9d06c2c5a79587504e6975 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f25bedb4d0206e51ed9d06c2c5a79587504e6975/stock_split_move.py |
pick = pick_obj.browse(cr, uid, record_id) | pick = pick_obj.browse(cr, uid, record_id, context=context) | def view_init(self, cr, uid, fields_list, context=None): """ Creates view dynamically and adding fields at runtime. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return: New arch of view with new columns. """ res = super(stock_split_move_line, self).view_init(cr, uid, fields_list, context=context) record_id = context and context.get('active_id', False) or False if record_id: pick_obj = self.pool.get('stock.picking') try: pick = pick_obj.browse(cr, uid, record_id) for m in [line for line in pick.move_lines]: if 'move%s' % m.id not in self._columns: self._columns['move%s' % m.id] = fields.float(string=m.product_id.name) except: return res return res | f25bedb4d0206e51ed9d06c2c5a79587504e6975 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f25bedb4d0206e51ed9d06c2c5a79587504e6975/stock_split_move.py |
pick = pick_obj.browse(cr, uid, record_id) | pick = pick_obj.browse(cr, uid, record_id, context=context) | def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): """ Changes the view dynamically @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param context: A standard dictionary @return: New arch of view. """ res = super(stock_split_move_line, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False) record_id = context and context.get('active_id', False) or False assert record_id,'Active ID not found' pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) arch_lst = ['<?xml version="1.0"?>', '<form string="Split lines">', '<label string="Indicate here the quantity of the new line. A quantity of zero will not split the line." colspan="4"/>'] for m in [line for line in pick.move_lines]: quantity = m.product_qty arch_lst.append('<field name="move%s" />\n<newline />' % (m.id,)) res['fields']['move%s' % m.id] = {'string' : m.product_id.name, 'type' : 'float', 'required' : True} arch_lst.append('<group col="2" colspan="4">') arch_lst.append('<button icon="gtk-cancel" special="cancel" string="Cancel" />') arch_lst.append('<button name="split_lines" string="Split" colspan="1" type="object" icon="gtk-apply" />') arch_lst.append('</group>') arch_lst.append('</form>') res['arch'] = '\n'.join(arch_lst) return res | f25bedb4d0206e51ed9d06c2c5a79587504e6975 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f25bedb4d0206e51ed9d06c2c5a79587504e6975/stock_split_move.py |
pick = pick_obj.browse(cr, uid, record_id) | pick = pick_obj.browse(cr, uid, record_id, context=context) | def split_lines(self, cr, uid, ids, context): """ Splits moves in quantity given in the wizard. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param ids: List of ids selected @param context: A standard dictionary @return: A dictionary which of fields with values. """ move_obj = self.pool.get('stock.move') record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) data = self.read(cr, uid, ids[0]) move_ids = [m.id for m in [line for line in pick.move_lines]] for move in move_obj.browse(cr, uid, move_ids): quantity = data['move%s' % move.id] if 0 < quantity < move.product_qty: new_qty = move.product_qty - quantity new_uos_qty = new_qty / move.product_qty * move.product_uos_qty new_obj = move_obj.copy(cr, uid, move.id, {'product_qty' : new_qty, 'product_uos_qty': new_uos_qty, 'state':move.state}) uos_qty = quantity / move.product_qty * move.product_uos_qty move_obj.write(cr, uid, [move.id], {'product_qty' : quantity, 'product_uos_qty': uos_qty}) return {} | f25bedb4d0206e51ed9d06c2c5a79587504e6975 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f25bedb4d0206e51ed9d06c2c5a79587504e6975/stock_split_move.py |
for move in move_obj.browse(cr, uid, move_ids): | for move in move_obj.browse(cr, uid, move_ids, context=context): | def split_lines(self, cr, uid, ids, context): """ Splits moves in quantity given in the wizard. @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param ids: List of ids selected @param context: A standard dictionary @return: A dictionary which of fields with values. """ move_obj = self.pool.get('stock.move') record_id = context and context.get('active_id', False) or False pick_obj = self.pool.get('stock.picking') pick = pick_obj.browse(cr, uid, record_id) data = self.read(cr, uid, ids[0]) move_ids = [m.id for m in [line for line in pick.move_lines]] for move in move_obj.browse(cr, uid, move_ids): quantity = data['move%s' % move.id] if 0 < quantity < move.product_qty: new_qty = move.product_qty - quantity new_uos_qty = new_qty / move.product_qty * move.product_uos_qty new_obj = move_obj.copy(cr, uid, move.id, {'product_qty' : new_qty, 'product_uos_qty': new_uos_qty, 'state':move.state}) uos_qty = quantity / move.product_qty * move.product_uos_qty move_obj.write(cr, uid, [move.id], {'product_qty' : quantity, 'product_uos_qty': uos_qty}) return {} | f25bedb4d0206e51ed9d06c2c5a79587504e6975 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f25bedb4d0206e51ed9d06c2c5a79587504e6975/stock_split_move.py |
result['res_id'] = created_inv | invoice_domain = eval(result['domain']) invoice_domain.append(('id', '=', created_inv)) result['domain'] = invoice_domain | def compute_refund(self, cr, uid, ids, mode='refund', context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: the account invoice refund’s ID or list of IDs | 378fb690e3c4fb0a0a7f225b1b11e9c39637a981 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/378fb690e3c4fb0a0a7f225b1b11e9c39637a981/account_invoice_refund.py |
ids = self.pool.get('ir.actions.act_window').search(cr, uid, [('usage','=','menu')], context=context) return ids and ids[0] or False | dataobj = self.pool.get('ir.model.data') try: model, res_id = dataobj.get_object_reference(cr, uid, 'base', 'action_menu_admin') if model != 'ir.actions.act_window': return False return res_id except ValueError: return False | def _get_menu(self,cr, uid, context=None): ids = self.pool.get('ir.actions.act_window').search(cr, uid, [('usage','=','menu')], context=context) return ids and ids[0] or False | 40cf6eac7a41306959c7d68d5241da13a10721cc /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/40cf6eac7a41306959c7d68d5241da13a10721cc/res_user.py |
res = etree.XML("<search string='%s'></search>" % root.get("string", "")) | res = etree.XML("""<search string="%s"></search>""" % root.get("string", "")) | def encode(s): if isinstance(s, unicode): return s.encode('utf8') return s | 54b19dc00832c195dcf6c74cc084b40ebc5e1daf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/54b19dc00832c195dcf6c74cc084b40ebc5e1daf/orm.py |
limit = datetime.strptime(move.date_expected, '%Y-%m-%d %H:%M:%S') + relativedelta(months=product.warranty) | limit = datetime.strptime(move.date_expected, '%Y-%m-%d %H:%M:%S') + relativedelta(months=int(product.warranty)) | def onchange_move_id(self, cr, uid, ids, prod_id=False, move_id=False): """ On change of move id sets values of guarantee limit, source location, destination location, partner and partner address. @param prod_id: Id of product in current record. @param move_id: Changed move. @return: Dictionary of values. """ data = {} data['value'] = {} if not prod_id: return data if move_id: move = self.pool.get('stock.move').browse(cr, uid, move_id) product = self.pool.get('product.product').browse(cr, uid, prod_id) limit = datetime.strptime(move.date_expected, '%Y-%m-%d %H:%M:%S') + relativedelta(months=product.warranty) data['value']['guarantee_limit'] = limit.strftime('%Y-%m-%d') data['value']['location_id'] = move.location_dest_id.id data['value']['location_dest_id'] = move.location_dest_id.id if move.address_id: data['value']['partner_id'] = move.address_id.partner_id and move.address_id.partner_id.id else: data['value']['partner_id'] = False data['value']['address_id'] = move.address_id and move.address_id.id d = self.onchange_partner_id(cr, uid, ids, data['value']['partner_id'], data['value']['address_id']) data['value'].update(d['value']) return data | 2aefa41343be642ec7fb6cde37f4d11fc592fed0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/2aefa41343be642ec7fb6cde37f4d11fc592fed0/mrp_repair.py |
exval = map(lambda x: str(x), cal_data.value) | exdates += cal_data.value exval = map(lambda x: x.strftime('%Y%m%dT%H%M%SZ'), exdates) | def parse_ics(self, cr, uid, child, cal_children=None, context=None): """ parse calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ | 3f284dd7e9d77fb480f46eb1d039e5b7f3109d77 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3f284dd7e9d77fb480f46eb1d039e5b7f3109d77/calendar.py |
elif field == 'vtimezone' and data[map_field] and data[map_field] not in timezones: | elif field == 'vtimezone' and data[map_field]: | def create_ics(self, cr, uid, datas, name, ical, context=None): """ create calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ | 3f284dd7e9d77fb480f46eb1d039e5b7f3109d77 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3f284dd7e9d77fb480f46eb1d039e5b7f3109d77/calendar.py |
tz_obj = self.pool.get('basic.calendar.timezone') ical = tz_obj.export_cal(cr, uid, None, \ data[map_field], ical, context=context) timezones.append(data[map_field]) | if tzval not in timezones: tz_obj = self.pool.get('basic.calendar.timezone') ical = tz_obj.export_cal(cr, uid, None, \ data[map_field], ical, context=context) timezones.append(data[map_field]) | def create_ics(self, cr, uid, datas, name, ical, context=None): """ create calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ | 3f284dd7e9d77fb480f46eb1d039e5b7f3109d77 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3f284dd7e9d77fb480f46eb1d039e5b7f3109d77/calendar.py |
dtfield.value = parser.parse(data[map_field]) | def create_ics(self, cr, uid, datas, name, ical, context=None): """ create calendaring and scheduling information @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ | 3f284dd7e9d77fb480f46eb1d039e5b7f3109d77 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3f284dd7e9d77fb480f46eb1d039e5b7f3109d77/calendar.py |
|
def _sum_debit_period(self, period_id, journal_id=False): | 5f9e013af4a4fa4e0dc0fc7ae1ac38e439a15a12 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5f9e013af4a4fa4e0dc0fc7ae1ac38e439a15a12/account_general_journal.py |
||
self.cr.execute('SELECT SUM(debit) FROM account_move_line ' 'WHERE period_id=%s AND journal_id IN %s ' | self.cr.execute('SELECT SUM(debit) FROM account_move_line l ' 'WHERE period_id=%s AND journal_id IN %s ' + self.query_get_clause + ' ' \ | def _sum_debit_period(self, period_id, journal_id=False): | 5f9e013af4a4fa4e0dc0fc7ae1ac38e439a15a12 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5f9e013af4a4fa4e0dc0fc7ae1ac38e439a15a12/account_general_journal.py |
self.cr.execute('SELECT SUM(credit) FROM account_move_line ' 'WHERE period_id=%s AND journal_id IN %s ' | self.cr.execute('SELECT SUM(credit) FROM account_move_line l ' 'WHERE period_id=%s AND journal_id IN %s '+ self.query_get_clause + ' ' \ | def _sum_credit_period(self, period_id, journal_id=None): if journal_id: journals = [journal_id] else: journals = self.journal_ids if not journals: return 0.0 self.cr.execute('SELECT SUM(credit) FROM account_move_line ' 'WHERE period_id=%s AND journal_id IN %s ' 'AND state<>\'draft\'', (period_id, tuple(journals))) return self.cr.fetchone()[0] or 0.0 | 5f9e013af4a4fa4e0dc0fc7ae1ac38e439a15a12 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5f9e013af4a4fa4e0dc0fc7ae1ac38e439a15a12/account_general_journal.py |
if tools.config.get('xmlrpcs', False): reg_http_service(HTTPDir('/xmlrpc/', XMLRPCRequestHandler, True)) logging.getLogger("web-services").info("Registered XML-RPC over HTTPS") | if tools.config.get('xmlrpcs', False) \ and not tools.config.get('xmlrpc', False): reg_http_service(HTTPDir('/xmlrpc/', XMLRPCRequestHandler), True) logging.getLogger("web-services").info("Registered XML-RPC over HTTPS only") | def init_xmlrpc(): if tools.config.get('xmlrpc', False): # Example of http file serving: # reg_http_service(HTTPDir('/test/',HTTPHandler)) reg_http_service(HTTPDir('/xmlrpc/', XMLRPCRequestHandler)) logging.getLogger("web-services").info("Registered XML-RPC over HTTP") if tools.config.get('xmlrpcs', False): reg_http_service(HTTPDir('/xmlrpc/', XMLRPCRequestHandler, True)) logging.getLogger("web-services").info("Registered XML-RPC over HTTPS") | 9002f4795114b02d9082bccc3f5472acf10fbe98 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/9002f4795114b02d9082bccc3f5472acf10fbe98/http_server.py |
if not context.get('alarm_id', False): self.do_alarm_unlink(cr, uid, [data.id], model) return True | def do_alarm_create(self, cr, uid, ids, model, date, context=None): """ Create Alarm for event. @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of res alarm’s IDs. @param model: Model name. @param date: Event date @param context: A standard dictionary for contextual values @return: True """ if not context: context = {} alarm_obj = self.pool.get('calendar.alarm') res_alarm_obj = self.pool.get('res.alarm') ir_obj = self.pool.get('ir.model') model_id = ir_obj.search(cr, uid, [('model', '=', model)])[0] | adf548ad9c5232c74200f848d97e74911c12b779 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/adf548ad9c5232c74200f848d97e74911c12b779/base_calendar.py |
|
datas['end_date'] = ''.join((re.compile('\d')).findall(datas.get('end_date'))) + '235959Z' | datas['end_date'] = ''.join((re.compile('\d')).findall(datas.get('end_date'))) + 'T235959Z' | def compute_rule_string(self, cr, uid, datas, context=None, *args): """ Compute rule string according to value type RECUR of iCalendar from the values given. @param self: the object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param datas: dictionary of freq and interval value. @param context: A standard dictionary for contextual values @return: String value of the format RECUR of iCalendar """ | adf548ad9c5232c74200f848d97e74911c12b779 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/adf548ad9c5232c74200f848d97e74911c12b779/base_calendar.py |
ls = base_calendar_id2real_id(base_calendar_id, with_date=res.get('duration', 0)) | ls = base_calendar_id2real_id(base_calendar_id, with_date=res and res.get('duration', 0) or 0) | def read(self, cr, uid, ids, fields=None, context=None, load='_classic_read'): """ Overrides orm Read method.Read List of fields for calendar event. @param cr: the current row, from the database cursor, @param user: the current user’s ID for security checks, @param ids: List of calendar event's id. @param fields: List of fields. @param context: A standard dictionary for contextual values @return: List of Dictionary of form [{‘name_of_the_field’: value, ...}, ...] """ if not context: context = {} | adf548ad9c5232c74200f848d97e74911c12b779 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/adf548ad9c5232c74200f848d97e74911c12b779/base_calendar.py |
for id in ids: ls = base_calendar_id2real_id(id) if not isinstance(ls, (str, int, long)) and len(ls) >= 2: date_new = ls[1] for record in self.read(cr, uid, [base_calendar_id2real_id(id)], \ ['date', 'rrule', 'exdate']): if record['rrule']: exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z' if record['date'] == date_new: res = self.write(cr, uid, [base_calendar_id2real_id(id)], {'exdate': exdate}) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, \ base_calendar_id2real_id(ids)) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, ids) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) | for event_id in ids: if isinstance(event_id, (int, long)): res = super(calendar_event, self).unlink(cr, uid, event_id) self.pool.get('res.alarm').do_alarm_unlink(cr, uid, [event_id], self._name) continue event_id, date_new = event_id.split('-') event_id = [int(event_id)] for record in self.read(cr, uid, event_id, ['date', 'rrule', 'exdate']): if record['rrule']: date_new = time.strftime("%Y-%m-%d %H:%M:%S", \ time.strptime(date_new, "%Y%m%d%H%M%S")) exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z' res = self.write(cr, uid, event_id, {'exdate': exdate}) else: res = super(calendar_event, self).unlink(cr, uid, event_id) self.pool.get('res.alarm').do_alarm_unlink(cr, uid, event_id, self._name) | def unlink(self, cr, uid, ids, context=None): """ Deletes records specified in ids. @param self: the object pointer. @param cr: the current row, from the database cursor, @param id: List of calendar event's id. @param context: A standard dictionary for contextual values @return: True """ res = False for id in ids: ls = base_calendar_id2real_id(id) if not isinstance(ls, (str, int, long)) and len(ls) >= 2: date_new = ls[1] for record in self.read(cr, uid, [base_calendar_id2real_id(id)], \ ['date', 'rrule', 'exdate']): if record['rrule']: exdate = (record['exdate'] and (record['exdate'] + ',') or '') + ''.join((re.compile('\d')).findall(date_new)) + 'Z' if record['date'] == date_new: res = self.write(cr, uid, [base_calendar_id2real_id(id)], {'exdate': exdate}) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, \ base_calendar_id2real_id(ids)) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) else: ids = map(lambda x: base_calendar_id2real_id(x), ids) res = super(calendar_event, self).unlink(cr, uid, ids) alarm_obj = self.pool.get('res.alarm') alarm_obj.do_alarm_unlink(cr, uid, ids, self._name) return res | adf548ad9c5232c74200f848d97e74911c12b779 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/adf548ad9c5232c74200f848d97e74911c12b779/base_calendar.py |
LOG_DEBUG_RPC_ANSWER = 'debug_rpc_answer' | def abortResponse(self, error, description, origin, details): if not tools.config['debug_mode']: raise Exception("%s -- %s\n\n%s"%(origin, description, details)) else: raise | 0228f8e8891fb4d532e8ef57fec29adf159b3155 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0228f8e8891fb4d532e8ef57fec29adf159b3155/netsvc.py |
|
description += "\n" + "=======================" + "\n" + data['description'] | description += "\n" + "=======================" + "\n" + notes | def close(self, cr, uid, ids, context=None): data = self.read(cr,uid,ids)[0] task_pool = self.pool.get('project.task') user_name = self.pool.get('res.users').browse(cr, uid, uid).name description = _("Closed By ") + user_name + _(" At ") + time.strftime('%Y-%m-%d %H:%M:%S') description += "\n" + "=======================" + "\n" + data['description'] if 'task_id' in context: task = task_pool.browse(cr, uid, context['task_id']) description = task.description + "\n\n" + description task_pool.write(cr, uid, [task.id], { 'description': description, 'state': 'done', 'date_end':time.strftime('%Y-%m-%d %H:%M:%S'), 'remaining_hours': 0.0 }) return {} | 4f51e523a6d6c7dc44e72c49e98c46cbf8d2c1e8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4f51e523a6d6c7dc44e72c49e98c46cbf8d2c1e8/project_close_task.py |
description = task.description + "\n\n" + description | description = task.description and task.description + "\n\n" + description | def close(self, cr, uid, ids, context=None): data = self.read(cr,uid,ids)[0] task_pool = self.pool.get('project.task') user_name = self.pool.get('res.users').browse(cr, uid, uid).name description = _("Closed By ") + user_name + _(" At ") + time.strftime('%Y-%m-%d %H:%M:%S') description += "\n" + "=======================" + "\n" + data['description'] if 'task_id' in context: task = task_pool.browse(cr, uid, context['task_id']) description = task.description + "\n\n" + description task_pool.write(cr, uid, [task.id], { 'description': description, 'state': 'done', 'date_end':time.strftime('%Y-%m-%d %H:%M:%S'), 'remaining_hours': 0.0 }) return {} | 4f51e523a6d6c7dc44e72c49e98c46cbf8d2c1e8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4f51e523a6d6c7dc44e72c49e98c46cbf8d2c1e8/project_close_task.py |
test_context = {'ref': self._ref, '_ref': self._ref} | local_context = {'ref': self._ref, '_ref': self._ref} context = self.get_context(assertion, local_context) | def process_assert(self, node): assertion, expressions = node.items()[0] | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
record = model.browse(self.cr, self.uid, id, assertion.context) | record = model.browse(self.cr, self.uid, id, context) | def process_assert(self, node): assertion, expressions = node.items()[0] | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
success = eval(test, test_context, record) | success = eval(test, local_context, record) | def process_assert(self, node): assertion, expressions = node.items()[0] | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
model = self.get_model(record.model) record_dict = self._create_record(model, fields) id = self.pool.get('ir.model.data')._update(self.cr, self.uid, record.model, \ self.module, record_dict, record.id, mode=self.mode) self.id_map[record.id] = int(id) if config.get('import_partial', False): self.cr.commit() | if self.isnoupdate(record) and self.mode != 'init': model = self.get_model(record.model) record_dict = self._create_record(model, fields) id = self.pool.get('ir.model.data')._update(self.cr, self.uid, record.model, \ self.module, record_dict, record.id, noupdate=self.isnoupdate(node), mode=self.mode) self.id_map[record.id] = int(id) if config.get('import_partial', False): self.cr.commit() | def process_record(self, node): record, fields = node.items()[0] | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
if self.mode != 'init': | function, values = node.items()[0] if self.isnoupdate(function) and self.mode != 'init': | def process_function(self, node): if self.mode != 'init': return function, values = node.items()[0] local_context = {'ref': self._ref, '_ref': self._ref} context = self.get_context(node, local_context) args = [] if function.eval: args = eval(function.eval, local_context) for value in values: if not 'model' in value and (not 'eval' in value or not 'search' in value): raise YamlImportException('You must provide a "model" and an "eval" or "search" to evaluate.') value_model = self.get_model(value['model']) local_context = {'ref': self._ref, '_ref': self._ref} local_context['obj'] = lambda x: value_model.browse(self.cr, self.uid, x, context=context) local_context.update(self.id_map) id = eval(value['eval'], local_context) if id != None: args.append(id) model = self.get_model(function.model) method = function.name getattr(model, method)(self.cr, self.uid, *args) | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
function, values = node.items()[0] | def process_function(self, node): if self.mode != 'init': return function, values = node.items()[0] local_context = {'ref': self._ref, '_ref': self._ref} context = self.get_context(node, local_context) args = [] if function.eval: args = eval(function.eval, local_context) for value in values: if not 'model' in value and (not 'eval' in value or not 'search' in value): raise YamlImportException('You must provide a "model" and an "eval" or "search" to evaluate.') value_model = self.get_model(value['model']) local_context = {'ref': self._ref, '_ref': self._ref} local_context['obj'] = lambda x: value_model.browse(self.cr, self.uid, x, context=context) local_context.update(self.id_map) id = eval(value['eval'], local_context) if id != None: args.append(id) model = self.get_model(function.model) method = function.name getattr(model, method)(self.cr, self.uid, *args) | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
|
context = self.get_context(node, local_context) | context = self.get_context(function, local_context) | def process_function(self, node): if self.mode != 'init': return function, values = node.items()[0] local_context = {'ref': self._ref, '_ref': self._ref} context = self.get_context(node, local_context) args = [] if function.eval: args = eval(function.eval, local_context) for value in values: if not 'model' in value and (not 'eval' in value or not 'search' in value): raise YamlImportException('You must provide a "model" and an "eval" or "search" to evaluate.') value_model = self.get_model(value['model']) local_context = {'ref': self._ref, '_ref': self._ref} local_context['obj'] = lambda x: value_model.browse(self.cr, self.uid, x, context=context) local_context.update(self.id_map) id = eval(value['eval'], local_context) if id != None: args.append(id) model = self.get_model(function.model) method = function.name getattr(model, method)(self.cr, self.uid, *args) | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
res_id=res and res[0] or False) | noupdate=self.isnoupdate(node), res_id=res and res[0] or False) | def process_menuitem(self, node): self.validate_xml_id(node.id) | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
self.pool.get('ir.model.data').ir_set(self.cr, self.uid, 'action', \ keyword, node.id, [node.src_model], value, replace=replace, isobject=True, xml_id=node.id) | self.pool.get('ir.model.data').ir_set(self.cr, self.uid, 'action', keyword, \ node.id, [node.src_model], value, replace=replace, noupdate=self.isnoupdate(node), isobject=True, xml_id=node.id) | def process_act_window(self, node): self.validate_xml_id(node.id) view_id = False if node.view: view_id = self.get_id(node.view) context = eval(node.context, {'ref': self._ref, '_ref': self._ref}) | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
self.pool.get('ir.model.data').ir_set(self.cr, self.uid, \ 'action', keyword, node.url, ["ir.actions.url"], value, replace=replace, isobject=True, xml_id=node.id) | self.pool.get('ir.model.data').ir_set(self.cr, self.uid, 'action', \ keyword, node.url, ["ir.actions.url"], value, replace=replace, \ noupdate=self.isnoupdate(node), isobject=True, xml_id=node.id) | def process_url(self, node): self.validate_xml_id(node.id) | cf770834143a7425671dada9f5af3c69732ef5a5 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cf770834143a7425671dada9f5af3c69732ef5a5/yaml_test.py |
for item in obj.user_id.child_ids: list_ids.append(item.id) | children = obj.pool.get('report_account_analytic.planning')._child_compute(cr, user, [obj.user_id.id], '', []) for u_id in children.get(obj.user_id.id, []): list_ids.append(u_id) | def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): if not context: context = context res = {} for obj in obj.browse(cr, user, ids, context=context): res[obj.id] = [] list_ids = [] for item in obj.user_id.child_ids: list_ids.append(item.id) list_ids.append(obj.user_id.id) ids2 = obj.pool.get(self._obj).search(cr, user, ['&',(self._fields_id,'=',obj.id),'|',('user_id','in',list_ids),('user_id','=',False)], limit=self._limit) for r in obj.pool.get(self._obj)._read_flat(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): if r[self._fields_id] not in res: res[r[self._fields_id]] = [] res[r[self._fields_id]].append( r['id'] ) return res | 63b6ec6050abd49262b25b84f93a5e7228817962 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/63b6ec6050abd49262b25b84f93a5e7228817962/project_planning.py |
def init(self, cr): cr.execute(""" create or replace view report_account_analytic_planning_stat as ( SELECT min(l.id) as id, l.user_id as user_id, a.user_id as manager_id, l.account_id as account_id, sum(l.amount/u.factor) as sum_amount, l.planning_id FROM report_account_analytic_planning_line l LEFT JOIN report_account_analytic_planning a on (a.id = l.planning_id) LEFT JOIN product_uom u on (l.amount_unit = u.id) GROUP BY l.planning_id, l.user_id, l.account_id, a.user_id ) """) | 63b6ec6050abd49262b25b84f93a5e7228817962 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/63b6ec6050abd49262b25b84f93a5e7228817962/project_planning.py |
||
j_id = j_ids and j_ids[0] or False | journal_id = j_ids and j_ids[0] or False | def on_change_unit_amount(self, cr, uid, id, prod_id, quantity, company_id, unit=False, journal_id=False, context=None): if context==None: context={} if not journal_id: j_ids = self.pool.get('account.analytic.journal').search(cr, uid, [('type','=','purchase')]) j_id = j_ids and j_ids[0] or False if not journal_id or not prod_id: return {} product_obj = self.pool.get('product.product') analytic_journal_obj =self.pool.get('account.analytic.journal') | 93e46a396e8ceab846144abe315fd43dbd422eaf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/93e46a396e8ceab846144abe315fd43dbd422eaf/account_analytic_line.py |
objname = uri2[-1] | objname = misc.ustr(uri2[-1]) | def put(self, uri, data, content_type=None): """ put the object into the filesystem """ self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type)) cr, uid, pool,dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() raise DAV_Forbidden try: node = self.uri2object(cr, uid, pool, uri2[:]) except Exception: node = False objname = uri2[-1] ret = None if not node: dir_node = self.uri2object(cr, uid, pool, uri2[:-1]) if not dir_node: cr.close() raise DAV_NotFound('Parent folder not found') | 91a0bbf1c53c7a3ae97ce6556029c2fbfcc168c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/91a0bbf1c53c7a3ae97ce6556029c2fbfcc168c0/dav_fs.py |
ret = (hurl, etag) | ret = (str(hurl), etag) | def put(self, uri, data, content_type=None): """ put the object into the filesystem """ self.parent.log_message('Putting %s (%d), %s'%( misc.ustr(uri), data and len(data) or 0, content_type)) cr, uid, pool,dbname, uri2 = self.get_cr(uri) if not dbname: if cr: cr.close() raise DAV_Forbidden try: node = self.uri2object(cr, uid, pool, uri2[:]) except Exception: node = False objname = uri2[-1] ret = None if not node: dir_node = self.uri2object(cr, uid, pool, uri2[:-1]) if not dir_node: cr.close() raise DAV_NotFound('Parent folder not found') | 91a0bbf1c53c7a3ae97ce6556029c2fbfcc168c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/91a0bbf1c53c7a3ae97ce6556029c2fbfcc168c0/dav_fs.py |
def onchange_journal(self, cr, uid, ids, journal_id): | def onchange_journal(self, cr, uid, ids, journal_id, line_ids, tax_id, partner_id, context={}): | def onchange_journal(self, cr, uid, ids, journal_id): if not journal_id: return False journal_pool = self.pool.get('account.journal') journal = journal_pool.browse(cr, uid, journal_id) account_id = journal.default_credit_account_id or journal.default_debit_account_id tax_id = False if account_id and account_id.tax_ids: tax_id = account_id.tax_ids[0].id return {'value':{'tax_id':tax_id}} | da3be9e496193e22109f36eaeee23d4e35ec36c2 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/da3be9e496193e22109f36eaeee23d4e35ec36c2/voucher.py |
return {'value':{'tax_id':tax_id}} | vals = self.onchange_price(cr, uid, ids, line_ids, tax_id, partner_id, context) vals['value'].update({'tax_id':tax_id}) return vals | def onchange_journal(self, cr, uid, ids, journal_id): if not journal_id: return False journal_pool = self.pool.get('account.journal') journal = journal_pool.browse(cr, uid, journal_id) account_id = journal.default_credit_account_id or journal.default_debit_account_id tax_id = False if account_id and account_id.tax_ids: tax_id = account_id.tax_ids[0].id return {'value':{'tax_id':tax_id}} | da3be9e496193e22109f36eaeee23d4e35ec36c2 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/da3be9e496193e22109f36eaeee23d4e35ec36c2/voucher.py |
proj_name = tools.ustr(procurement.name) proj_exist_id = self.pool.get('project.project').search(cr, uid, [('name','=',proj_name)], context=context) if not proj_exist_id: project_id = self.pool.get('project.project').create(cr, uid, {'name':proj_name}) else: project_id = proj_exist_id[0] | def action_produce_assign_service(self, cr, uid, ids, context={}): for procurement in self.browse(cr, uid, ids): sline = self.pool.get('sale.order.line') sale_ids = sline.search(cr, uid, [('procurement_id','=',procurement.id)], context) content = '' l = None project_id = None for line in sline.browse(cr, uid, sale_ids, context=context): content += (line.notes or '') l = line if line.order_id.project_id: content+="\n\n"+line.order_id.project_id.complete_name | 4fbdd7b65e82c2cd9177189b2fe794933365e12a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4fbdd7b65e82c2cd9177189b2fe794933365e12a/mrp.py |
|
'name': (procurement.origin or procurement.product_id.name) +': '+(procurement.name or ''), | 'name': '%s:%s' %(procurement.product_id.name or procurement.origin, procurement.name or ''), | def action_produce_assign_service(self, cr, uid, ids, context={}): for procurement in self.browse(cr, uid, ids): sline = self.pool.get('sale.order.line') sale_ids = sline.search(cr, uid, [('procurement_id','=',procurement.id)], context) content = '' l = None project_id = None for line in sline.browse(cr, uid, sale_ids, context=context): content += (line.notes or '') l = line if line.order_id.project_id: content+="\n\n"+line.order_id.project_id.complete_name | 4fbdd7b65e82c2cd9177189b2fe794933365e12a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4fbdd7b65e82c2cd9177189b2fe794933365e12a/mrp.py |
'partner_id': l and l.order_id.partner_id.id or False }) | 'partner_id': l and l.order_id.partner_id.id or False, 'project_id': project_id, },context=context) | def action_produce_assign_service(self, cr, uid, ids, context={}): for procurement in self.browse(cr, uid, ids): sline = self.pool.get('sale.order.line') sale_ids = sline.search(cr, uid, [('procurement_id','=',procurement.id)], context) content = '' l = None project_id = None for line in sline.browse(cr, uid, sale_ids, context=context): content += (line.notes or '') l = line if line.order_id.project_id: content+="\n\n"+line.order_id.project_id.complete_name | 4fbdd7b65e82c2cd9177189b2fe794933365e12a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4fbdd7b65e82c2cd9177189b2fe794933365e12a/mrp.py |
'name': fields.many2one('ir.model', 'Object', required=True), | 'name': fields.char('Rule Name', size=64, required=True), 'model_id': fields.many2one('ir.model', 'Object', required=True), | def priority_get(self, cr, uid, context={}): """ Get Priority @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param context: A standard dictionary for contextual values """ return [('', '')] | 1d07f4092b58310420993921b0ddeb3ac65bb096 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/1d07f4092b58310420993921b0ddeb3ac65bb096/base_action_rule.py |