rem (stringlengths 1-322k) | add (stringlengths 0-2.05M) | context (stringlengths 4-228k) | meta (stringlengths 156-215) |
---|---|---|---|
arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context) | arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, fields_def.keys(), context) | fields_def = self.__view_look_dom(cr, user, node, view_id, context=context) | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
date_found = False for dt in ['date','date_start','x_date','x_date_start']: if dt in self._columns: self._date_name = dt date_found = True break if not date_found: raise except_orm(_('Invalid Object Architecture!'),_("Insufficient fields for Calendar View!")) | date_found = False for dt in ['date','date_start','x_date','x_date_start']: if dt in self._columns: self._date_name = dt date_found = True break if not date_found: raise except_orm(_('Invalid Object Architecture!'),_("Insufficient fields for Calendar View!")) | def __get_default_calendar_view(self): """Generate a default calendar view (For internal use only). """ | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
if dt_delay in self._columns: arch += ' date_delay="' + dt_delay + '"' break | if dt_delay in self._columns: arch += ' date_delay="' + dt_delay + '"' break | def __get_default_calendar_view(self): """Generate a default calendar view (For internal use only). """ | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
msg = "Form View contain more than one default_focus attribute" netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg) raise except_orm('View Error !',msg) | msg = "Form View contain more than one default_focus attribute" netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, msg) raise except_orm('View Error !',msg) | def clean(x): x = x[2] for key in ('report_sxw_content', 'report_rml_content', 'report_sxw', 'report_rml', 'report_sxw_content_data', 'report_rml_content_data'): if key in x: del x[key] return x | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
and defaults[dv] and isinstance(defaults[dv][0], (int, long)): defaults[dv] = [(6, 0, defaults[dv])] | and defaults[dv] and isinstance(defaults[dv][0], (int, long)): defaults[dv] = [(6, 0, defaults[dv])] | def _add_missing_default_values(self, cr, uid, values, context=None): missing_defaults = [] avoid_tables = [] # avoid overriding inherited values when parent is set for tables, parent_field in self._inherits.items(): if parent_field in values: avoid_tables.append(tables) for field in self._columns.keys(): if not field in values: missing_defaults.append(field) for field in self._inherit_fields.keys(): if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables): missing_defaults.append(field) | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
where_clause = ' where '+string.join(where_clause, ' and ') | where_clause = ' where ' + ' and '.join(where_clause) | def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None): """ Get the list of records in list view grouped by the given ``groupby`` fields | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
if groupby and fget.has_key(groupby): | if groupby and groupby in fget: | def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None): """ Get the list of records in list view grouped by the given ``groupby`` fields | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s" \ | cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \ | def write(self, cr, user, ids, vals, context=None): """ Update records with given ids with the given field values | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
string.join(upd0, ',') + ' where id = %s', upd1) | ','.join(upd0) + ' where id = %s', upd1) | def _store_set_values(self, cr, uid, ids, fields, context): if not ids: return True field_flag = False field_dict = {} if self._log_access: cr.execute('select id,write_date from '+self._table+' where id IN %s',(tuple(ids),)) res = cr.fetchall() for r in res: if r[1]: field_dict.setdefault(r[0], []) res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S') write_date = datetime.datetime.fromtimestamp(time.mktime(res_date)) for i in self.pool._store_function.get(self._name, []): if i[5]: up_write_date = write_date + datetime.timedelta(hours=i[5]) if datetime.datetime.now() < up_write_date: if i[1] in fields: field_dict[r[0]].append(i[1]) if not field_flag: field_flag = True todo = {} keys = [] for f in fields: if self._columns[f]._multi not in keys: keys.append(self._columns[f]._multi) todo.setdefault(self._columns[f]._multi, []) todo[self._columns[f]._multi].append(f) for key in keys: val = todo[key] if key: # uid == 1 for accessing objects having rules defined on store fields result = self._columns[val[0]].get(cr, self, ids, val, 1, context=context) for id,value in result.items(): if field_flag: for f in value.keys(): if f in field_dict[id]: value.pop(f) upd0 = [] upd1 = [] for v in value: if v not in val: continue if self._columns[v]._type in ('many2one', 'one2one'): try: value[v] = value[v][0] except: pass upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0]) upd1.append(self._columns[v]._symbol_set[1](value[v])) upd1.append(id) if upd0 and upd1: cr.execute('update "' + self._table + '" set ' + \ string.join(upd0, ',') + ' where id = %s', upd1) | 827ce83b5767611573e3e504cd882f1684b5bd2a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/827ce83b5767611573e3e504cd882f1684b5bd2a/orm.py |
def _product_value(self, cr, uid, ids, field_names, arg, context=None): """Computes stock value (real and virtual) for a product, as well as stock qty (real and virtual). @param field_names: Name of field @return: Dictionary of values """ result = dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids]) | 6f2bb74f1519e7d0382e6ec1c4a25e47758f91b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/6f2bb74f1519e7d0382e6ec1c4a25e47758f91b8/stock.py |
c['location'] = loc_id for prod in product_product_obj.browse(cr, uid, product_ids, context=c): for f in field_names: if f == 'stock_real': result[loc_id][f] += prod.qty_available elif f == 'stock_virtual': result[loc_id][f] += prod.virtual_available elif f == 'stock_real_value': amount = prod.qty_available * prod.standard_price amount = currency_obj.round(cr, uid, currency, amount) result[loc_id][f] += amount elif f == 'stock_virtual_value': amount = prod.virtual_available * prod.standard_price amount = currency_obj.round(cr, uid, currency, amount) result[loc_id][f] += amount | if loc_id in ids: c['location'] = loc_id for prod in product_product_obj.browse(cr, uid, product_ids, context=c): for f in field_names: if f == 'stock_real': result[loc_id][f] += prod.qty_available elif f == 'stock_virtual': result[loc_id][f] += prod.virtual_available elif f == 'stock_real_value': amount = prod.qty_available * prod.standard_price amount = currency_obj.round(cr, uid, currency, amount) result[loc_id][f] += amount elif f == 'stock_virtual_value': amount = prod.virtual_available * prod.standard_price amount = currency_obj.round(cr, uid, currency, amount) result[loc_id][f] += amount | def _product_value(self, cr, uid, ids, field_names, arg, context=None): """Computes stock value (real and virtual) for a product, as well as stock qty (real and virtual). @param field_names: Name of field @return: Dictionary of values """ result = dict([(i, {}.fromkeys(field_names, 0.0)) for i in ids]) | 6f2bb74f1519e7d0382e6ec1c4a25e47758f91b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/6f2bb74f1519e7d0382e6ec1c4a25e47758f91b8/stock.py |
return self._models_field_get(cr, uid, 'model_id.model', 'model_id.name', context) | return self._models_field_get(cr, uid, 'model', 'name', context) | def _models_get(self, cr, uid, context=None): return self._models_field_get(cr, uid, 'model_id.model', 'model_id.name', context) | a4247c8dc1b0dc7df1a3bb4cd1d5f01b46edf902 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a4247c8dc1b0dc7df1a3bb4cd1d5f01b46edf902/ir_property.py |
repeated_move_line_ids.append(move_line_id) | repeated_move_line_ids += move_line_id | def _search_invoices(obj, cr, uid, data, context): pool = pooler.get_pool(cr.dbname) line_obj = pool.get('account.move.line') statement_obj = pool.get('account.bank.statement') journal_obj = pool.get('account.journal') statement = statement_obj.browse(cr, uid, data['id'], context=context) args_move_line = [] repeated_move_line_ids = [] # Creating a group that is unique for importing move lines(move lines, once imported into statement lines, should not appear again) for st_line in statement.line_ids: args_move_line = [] args_move_line.append(('name','=', st_line.name)) args_move_line.append(('ref','=',st_line.ref)) if st_line.partner_id: args_move_line.append(('partner_id','=',st_line.partner_id.id)) args_move_line.append(('account_id','=',st_line.account_id.id)) move_line_id = line_obj.search(cr, uid, args_move_line,context=context) if move_line_id: repeated_move_line_ids.append(move_line_id) journal_ids = data['form']['journal_id'][0][2] if journal_ids == []: journal_ids = journal_obj.search(cr, uid, [('type', 'in', ('sale','cash','purchase'))], context=context) args = [ ('reconcile_id', '=', False), ('journal_id', 'in', journal_ids), ('account_id.reconcile', '=', True)] if repeated_move_line_ids: args.append(('id','not in',repeated_move_line_ids)) line_ids = line_obj.search(cr, uid, args, #order='date DESC, id DESC', #doesn't work context=context) FORM.string = '''<?xml version="1.0"?> | 388a073a8fcad34327e9ae4f2bf30a143b98cf24 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/388a073a8fcad34327e9ae4f2bf30a143b98cf24/wizard_statement_from_invoice.py |
c.type_id as type_id, | c.type_action as type_action, | def init(self, cr): | cfec256242a6e657c8315ac4809e0a3733b66cef /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cfec256242a6e657c8315ac4809e0a3733b66cef/crm_claim_report.py |
c.priority,c.type_id,c.date_deadline,c.date_closed | c.priority,c.type_action,c.date_deadline,c.date_closed | def init(self, cr): | cfec256242a6e657c8315ac4809e0a3733b66cef /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/cfec256242a6e657c8315ac4809e0a3733b66cef/crm_claim_report.py |
m.exdate, m.exrule from " + self._table + \ | m.exdate, m.exrule, m.recurrent_id, m.recurrent_uid from " + self._table + \ | def get_recurrent_ids(self, cr, uid, select, base_start_date, base_until_date, limit=100): """Gives virtual event ids for recurring events based on value of Recurrence Rule This method gives ids of dates that comes between start date and end date of calendar views @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param base_start_date: Get Start Date @param base_until_date: Get End Date @param limit: The Number of Results to Return """ | c8f907d5e5f7034a6f406676c31351087bb5f9ee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c8f907d5e5f7034a6f406676c31351087bb5f9ee/base_calendar.py |
result.append(idval) count += 1 | if not data['recurrent_id']: result.append(idval) count += 1 else: ex_id = real_id2base_calendar_id(data['recurrent_uid'], data['recurrent_id']) recur_dict.append(ex_id) | def get_recurrent_ids(self, cr, uid, select, base_start_date, base_until_date, limit=100): """Gives virtual event ids for recurring events based on value of Recurrence Rule This method gives ids of dates that comes between start date and end date of calendar views @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param base_start_date: Get Start Date @param base_until_date: Get End Date @param limit: The Number of Results to Return """ | c8f907d5e5f7034a6f406676c31351087bb5f9ee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c8f907d5e5f7034a6f406676c31351087bb5f9ee/base_calendar.py |
ids = result | ids = list(set(result)-set(recur_dict)) | def get_recurrent_ids(self, cr, uid, select, base_start_date, base_until_date, limit=100): """Gives virtual event ids for recurring events based on value of Recurrence Rule This method gives ids of dates that comes between start date and end date of calendar views @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param base_start_date: Get Start Date @param base_until_date: Get End Date @param limit: The Number of Results to Return """ | c8f907d5e5f7034a6f406676c31351087bb5f9ee /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c8f907d5e5f7034a6f406676c31351087bb5f9ee/base_calendar.py |
attrs.append("attrs='{'readonly':[('state','=','valid')]}'") | attrs.append('''attrs="{'readonly':[('state','=','valid')]}"''') | def fields_view_get(self, cr, uid, view_id=None, view_type='form', context={}, toolbar=False, submenu=False): journal_pool = self.pool.get('account.journal') | 6b80f6ca44206efcdf7bfa00f83f406c451ee054 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/6b80f6ca44206efcdf7bfa00f83f406c451ee054/account_move_line.py |
linktopartner = "http:\\\\"+web_server+":"+str(web_server_port)+"\\openerp\\form\\view?model=res.partner&id="+str(vals) win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, linktopartner) | linktopartner = "http://"+web_server+":"+str(web_server_port)+"/openerp/form/view?model=res.partner&id="+str(vals) win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, str(linktopartner)) | def OpenPartnerForm(txtProcessor,*args): import win32con b = check() if not b: return #Acquiring control of the text box partner_link = txtProcessor.GetControl() #Reading Current Selected Email. ex = txtProcessor.window.manager.outlook.ActiveExplorer() assert ex.Selection.Count == 1 mail = ex.Selection.Item(1) partner_text = "" try: partner_text = ustr(mail.SenderName).encode('iso-8859-1') sender_mail = ustr(mail.SenderEmailAddress).encode('iso-8859-1') except Exception: win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, "< Error in reading email.>") pass vals = NewConn.SearchPartner(sender_mail) if vals == True: win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, "< Their is contact related to "+str(partner_text)+" email address, but no partner is linked to contact>") txtProcessor.init_done=True return if vals == None: win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, "< No Partner found linked to "+str(partner_text)+" email address.>") txtProcessor.init_done=True return global web_server global web_server_port if web_server.strip() == "" or web_server.strip() == "http:\\\\": win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, " <Invalid Server Address>") txtProcessor.init_done=True return try: linktopartner = "http:\\\\"+web_server+":"+str(web_server_port)+"\\openerp\\form\\view?model=res.partner&id="+str(vals) win32gui.SendMessage(partner_link, win32con.WM_SETTEXT, 0, linktopartner) except Exception,e: win32ui.MessageBox("Error While Opening Partner.\n"+str(e),"Open Partner", flag_error) webbrowser.open_new(linktopartner) txtProcessor.init_done=True | aa13d4a64f2ff28725bbe9cbb1492e8820d276fd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/aa13d4a64f2ff28725bbe9cbb1492e8820d276fd/dialog_map.py |
result = safe_eval(submit_result) | result = safe_eval(submit_result) if submit_result else {} | def get_sys_logs(cr, uid): """ Utility method to send a publisher warranty get logs messages. """ pool = pooler.get_pool(cr.dbname) dbuuid = pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid') db_create_date = pool.get('ir.config_parameter').get_param(cr, uid, 'database.create_date') nbr_users = pool.get("res.users").search(cr, uid, [], count=True) contractosv = pool.get('publisher_warranty.contract') contracts = contractosv.browse(cr, uid, contractosv.search(cr, uid, [])) user = pool.get("res.users").browse(cr, uid, uid) msg = { "dbuuid": dbuuid, "nbr_users": nbr_users, "dbname": cr.dbname, "db_create_date": db_create_date, "version": release.version, "contracts": [c.name for c in contracts], "language": user.context_lang, } add_arg = {"timeout":30} if sys.version_info >= (2,6) else {} uo = urllib2.urlopen(config.get("publisher_warranty_url"), urllib.urlencode({'arg0': msg, "action": "update",}), **add_arg) try: submit_result = uo.read() finally: uo.close() result = safe_eval(submit_result) return result | 7cca9385860d0b006c0a5c33284ce51840a2ce16 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/7cca9385860d0b006c0a5c33284ce51840a2ce16/publisher_warranty.py |
ref_acc_bank = data.res_id | view_id_cur = data.res_id | def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') | 115dac5e71e2ad517251d87ca3ae088e104b5297 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/115dac5e71e2ad517251d87ca3ae088e104b5297/account.py |
tmp = self.pool.get('res.partner.bank').name_get(cr, uid, [line.acc_no.id])[0][1] | tmp = line.acc_name | def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') | 115dac5e71e2ad517251d87ca3ae088e104b5297 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/115dac5e71e2ad517251d87ca3ae088e104b5297/account.py |
'name': line.acc_no.bank and line.acc_no.bank.name+' '+tmp or tmp, | 'name': tmp, | def execute(self, cr, uid, ids, context=None): obj_multi = self.browse(cr, uid, ids[0]) obj_acc = self.pool.get('account.account') obj_acc_tax = self.pool.get('account.tax') obj_journal = self.pool.get('account.journal') obj_sequence = self.pool.get('ir.sequence') obj_acc_template = self.pool.get('account.account.template') obj_fiscal_position_template = self.pool.get('account.fiscal.position.template') obj_fiscal_position = self.pool.get('account.fiscal.position') data_pool = self.pool.get('ir.model.data') | 115dac5e71e2ad517251d87ca3ae088e104b5297 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/115dac5e71e2ad517251d87ca3ae088e104b5297/account.py |
import datetime import calendar | def last_day_of_current_month(): import datetime import calendar today = datetime.date.today() last_day = str(calendar.monthrange(today.year, today.month)[1]) return time.strftime('%Y-%m-' + last_day) | 77eed56346ce06b4d745389365150ee96e9798df /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/77eed56346ce06b4d745389365150ee96e9798df/orm.py |
|
import random | def __init__(self, cr): super(orm, self).__init__(cr) | 77eed56346ce06b4d745389365150ee96e9798df /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/77eed56346ce06b4d745389365150ee96e9798df/orm.py |
|
st_line = account_bank_statement_line_obj.browse(cr, uid, st_line_id.id, context) | st_line = account_bank_statement_line_obj.browse(cr, uid, st_line_id, context) | def create_move_from_st_line(self, cr, uid, st_line_id, company_currency_id, st_line_number, context=None): res_currency_obj = self.pool.get('res.currency') res_users_obj = self.pool.get('res.users') account_move_obj = self.pool.get('account.move') account_move_line_obj = self.pool.get('account.move.line') account_analytic_line_obj = self.pool.get('account.analytic.line') account_bank_statement_line_obj = self.pool.get('account.bank.statement.line') st_line = account_bank_statement_line_obj.browse(cr, uid, st_line_id.id, context) st = st_line.statement_id | 3a60e60b1d5c8b684104fb7a5e2c7e948af4b061 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3a60e60b1d5c8b684104fb7a5e2c7e948af4b061/account_bank_statement.py |
res_id = email_tool.process_email(cr, uid, server.object_id.model, data[0][1], attach=server.attach, context=context) | res_id = email_tool.process_email(cr, uid, server.object_id.model, msg, attach=server.attach, context=context) | def fetch_mail(self, cr, uid, ids, context=None): if not context: context = {} email_tool = self.pool.get('email.server.tools') for server in self.browse(cr, uid, ids, context): logger.notifyChannel('imap', netsvc.LOG_INFO, 'fetchmail start checking for new emails on %s' % (server.name)) context.update({'server_id': server.id, 'server_type': server.type}) count = 0 try: if server.type == 'imap': imap_server = None if server.is_ssl: imap_server = IMAP4_SSL(server.server, int(server.port)) else: imap_server = IMAP4(server.server, int(server.port)) | d31b61e8a2ff4ac36da7d3de5705007c78e058e4 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d31b61e8a2ff4ac36da7d3de5705007c78e058e4/fetchmail.py |
res.update({'subject': ustr(context.get('subject', case.name) or '')}) | res.update({'subject': tools.ustr(context.get('subject', case.name) or '')}) | def default_get(self, cr, uid, fields, context=None): """ This function gets default values """ if not context: context = {} | dfd29b154e1f0e158b026c9bda325a0418351869 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/dfd29b154e1f0e158b026c9bda325a0418351869/crm_send_email.py |
'name': sale.name, | 'name': sale.client_order_ref or sale.name, | def create_invoices(self, cr, uid, ids, context={}): """ To create invoices. | f3408f9853cd05906f7cd14ecaa0055a26f68e0a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f3408f9853cd05906f7cd14ecaa0055a26f68e0a/sale_make_invoice_advance.py |
if mode in ('w', 'w+', 'r+'): self._need_index = True else: self._need_index = False | if mode.endswith('b'): mode = mode[:-1] self.mode = mode | def __init__(self, parent, path, mode): nodes.node_descriptor.__init__(self, parent) self.__file = open(path, mode) if mode in ('w', 'w+', 'r+'): self._need_index = True else: self._need_index = False for attr in ('closed', 'read', 'write', 'seek', 'tell'): setattr(self,attr, getattr(self.__file, attr)) | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
if self._need_index: | if self.mode in ('w', 'w+', 'r+'): | def close(self): # TODO: locking in init, close() fname = self.__file.name self.__file.close() if self._need_index: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() icont = '' mime = '' filename = par.path if isinstance(filename, (tuple, list)): filename = '/'.join(filename) try: mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) pass | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
cr.execute('UPDATE ir_attachment SET index_content = %s, file_type = %s WHERE id = %s', (icont_u, mime, par.file_id)) par.content_length = filesize | fsize = os.stat(fname).st_size cr.execute("UPDATE ir_attachment " \ " SET index_content = %s, file_type = %s, " \ " file_size = %s " \ " WHERE id = %s", (icont_u, mime, fsize, par.file_id)) par.content_length = fsize | def close(self): # TODO: locking in init, close() fname = self.__file.name self.__file.close() if self._need_index: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() icont = '' mime = '' filename = par.path if isinstance(filename, (tuple, list)): filename = '/'.join(filename) try: mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) pass | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
logging.getLogger('document.storage').debug('Cannot save file indexed content:', exc_info=True) | logging.getLogger('document.storage').warning('Cannot save file indexed content:', exc_info=True) elif self.mode in ('a', 'a+' ): try: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() fsize = os.stat(fname).st_size cr.execute("UPDATE ir_attachment SET file_size = %s " \ " WHERE id = %s", (fsize, par.file_id)) par.content_length = fsize par.content_type = mime cr.commit() cr.close() except Exception: logging.getLogger('document.storage').warning('Cannot save file appended content:', exc_info=True) | def close(self): # TODO: locking in init, close() fname = self.__file.name self.__file.close() if self._need_index: par = self._get_parent() cr = pooler.get_db(par.context.dbname).cursor() icont = '' mime = '' filename = par.path if isinstance(filename, (tuple, list)): filename = '/'.join(filename) try: mime, icont = cntIndex.doIndex(None, filename=filename, content_type=None, realfname=fname) except Exception: logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True) pass | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
if ira.file_size: self._doclog.warning( "ir.attachment raise IOError(errno.ENOENT, 'No file can be located') fpath = os.path.join(boo.path, ira.store_fname) | if mode in ('r','r+'): if ira.file_size: self._doclog.warning( "ir.attachment raise IOError(errno.ENOENT, 'No file can be located') else: store_fname = self.__get_random_fname(boo.path) cr.execute('UPDATE ir_attachment SET store_fname = %s WHERE id = %s', (store_fname, ira.id)) fpath = os.path.join(boo.path, store_fname) else: fpath = os.path.join(boo.path, ira.store_fname) | def get_file(self, cr, uid, id, file_node, mode, context=None): """ Return a file-like object for the contents of some node """ if context is None: context = {} boo = self.browse(cr, uid, id, context) if not boo.online: raise RuntimeError('media offline') ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) if boo.type == 'filestore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) raise IOError(errno.ENOENT, 'No file can be located') fpath = os.path.join(boo.path, ira.store_fname) return nodefd_file(file_node, path=fpath, mode=mode) | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
if not os.path.exists(fpath): | if (not os.path.exists(fpath)) and mode in ('r','r+'): | def get_file(self, cr, uid, id, file_node, mode, context=None): """ Return a file-like object for the contents of some node """ if context is None: context = {} boo = self.browse(cr, uid, id, context) if not boo.online: raise RuntimeError('media offline') ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) if boo.type == 'filestore': if not ira.store_fname: # On a migrated db, some files may have the wrong storage type # try to fix their directory. if ira.file_size: self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id) raise IOError(errno.ENOENT, 'No file can be located') fpath = os.path.join(boo.path, ira.store_fname) return nodefd_file(file_node, path=fpath, mode=mode) | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
flag = None if os.path.isdir(path): for dirs in os.listdir(path): if os.path.isdir(os.path.join(path, dirs)) and len(os.listdir(os.path.join(path, dirs))) < 4000: flag = dirs break flag = flag or create_directory(path) filename = random_name() fname = os.path.join(path, flag, filename) | store_fname = self.__get_random_fname(path) fname = os.path.join(path, store_fname) | def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None): """ store the data. This function MUST be used from an ir.attachment. It wouldn't make sense to store things persistently for other types (dynamic). """ if not context: context = {} boo = self.browse(cr, uid, id, context) if fil_obj: ira = fil_obj else: ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
store_fname = os.path.join(flag, filename) | def set_data(self, cr, uid, id, file_node, data, context=None, fil_obj=None): """ store the data. This function MUST be used from an ir.attachment. It wouldn't make sense to store things persistently for other types (dynamic). """ if not context: context = {} boo = self.browse(cr, uid, id, context) if fil_obj: ira = fil_obj else: ira = self.pool.get('ir.attachment').browse(cr, uid, file_node.file_id, context=context) | 31d45716ca29df2809d98d6aed3e9a5eb64d20d9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/31d45716ca29df2809d98d6aed3e9a5eb64d20d9/document_storage.py |
|
for id in ids: res[id] = {}.fromkeys(name, 0) | def _user_left_days(self, cr, uid, ids, name, args, context={}): return_false = False employee_id = False res = {} for id in ids: res[id] = {}.fromkeys(name, 0) if context and context.has_key('employee_id'): if not context['employee_id']: return_false = True employee_id = context['employee_id'] else: employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id','=',uid)]) if employee_ids: employee_id = employee_ids[0] else: return_false = True if employee_id: res = self.get_days(cr, uid, ids, employee_id, return_false, context=context) return res | 5709d04ce1f984e0993a693f591f547593c382b9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5709d04ce1f984e0993a693f591f547593c382b9/hr_holidays.py |
|
on (inv_line.product_id = pt.id) | on (inv_line.product_id = pp.id) | def init(self, cr): drop_view_if_exists(cr, 'report_intrastat') cr.execute(""" create or replace view report_intrastat as ( select to_char(inv.create_date, 'YYYY') as name, to_char(inv.create_date, 'MM') as month, min(inv_line.id) as id, intrastat.id as intrastat_id, upper(inv_country.code) as code, sum(case when inv_line.price_unit is not null then inv_line.price_unit * inv_line.quantity else 0 end) as value, sum( case when uom.category_id != puom.category_id then (pt.weight_net * inv_line.quantity) else (pt.weight_net * inv_line.quantity * uom.factor) end ) as weight, sum( case when uom.category_id != puom.category_id then inv_line.quantity else (inv_line.quantity * uom.factor) end ) as supply_units, | eb55200a3e873edad3a3ca98b974129457f7f111 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/eb55200a3e873edad3a3ca98b974129457f7f111/report_intrastat.py |
'state_id':int(payload.country_id), | 'state_id':int(payload.state_id), | def execute(self, cr, uid, ids, context=None): assert len(ids) == 1, "We should only get one object from the form" payload = self.browse(cr, uid, ids[0], context=context) if not getattr(payload, 'company_id', None): raise ValueError('Case where no default main company is setup ' 'not handled yet') | f4b8f2776bfac105b2f02e5f56c7d352c22d50c0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f4b8f2776bfac105b2f02e5f56c7d352c22d50c0/todo.py |
if move.move_dest_id.picking_id: | if context.get('call_unlink',False) and move.move_dest_id.picking_id: | def action_cancel(self, cr, uid, ids, context={}): if not len(ids): return True pickings = {} for move in self.browse(cr, uid, ids): if move.state in ('confirmed', 'waiting', 'assigned', 'draft'): if move.picking_id: pickings[move.picking_id.id] = True if move.move_dest_id and move.move_dest_id.state == 'waiting': self.write(cr, uid, [move.move_dest_id.id], {'state': 'assigned'}) if move.move_dest_id.picking_id: wf_service = netsvc.LocalService("workflow") wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr) self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}) | a4ca4cf7b4dce3f22cad1caf948b51375a3a0fbd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a4ca4cf7b4dce3f22cad1caf948b51375a3a0fbd/stock.py |
for pick in self.pool.get('stock.picking').browse(cr, uid, pickings.keys()): if all(move.state == 'cancel' for move in pick.move_lines): self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'}) | if not context.get('call_unlink',False): for pick in self.pool.get('stock.picking').browse(cr, uid, pickings.keys()): if all(move.state == 'cancel' for move in pick.move_lines): self.pool.get('stock.picking').write(cr, uid, [pick.id], {'state': 'cancel'}) | def action_cancel(self, cr, uid, ids, context={}): if not len(ids): return True pickings = {} for move in self.browse(cr, uid, ids): if move.state in ('confirmed', 'waiting', 'assigned', 'draft'): if move.picking_id: pickings[move.picking_id.id] = True if move.move_dest_id and move.move_dest_id.state == 'waiting': self.write(cr, uid, [move.move_dest_id.id], {'state': 'assigned'}) if move.move_dest_id.picking_id: wf_service = netsvc.LocalService("workflow") wf_service.trg_write(uid, 'stock.picking', move.move_dest_id.picking_id.id, cr) self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}) | a4ca4cf7b4dce3f22cad1caf948b51375a3a0fbd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a4ca4cf7b4dce3f22cad1caf948b51375a3a0fbd/stock.py |
'create_uid': fields.many2one('res.users', 'Author', select=True), 'create_date': fields.datetime("Created on", select=True), 'write_date': fields.datetime("Modification Date", select=True), | 'create_uid': fields.many2one('res.users', 'Author', select=True, readonly=True), 'create_date': fields.datetime("Created on", select=True, readonly=True), 'write_date': fields.datetime("Modification Date", select=True, readonly=True), | def open_wiki_page(self, cr, uid, ids, context): | fe0e28c1574eddef05da9875369dbd44da9b149d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fe0e28c1574eddef05da9875369dbd44da9b149d/wiki.py |
if 'create_uid' in vals: del vals['create_uid'] | def create(self, cr, uid, vals, context=None): | fe0e28c1574eddef05da9875369dbd44da9b149d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fe0e28c1574eddef05da9875369dbd44da9b149d/wiki.py |
|
def createInstance(cls, pool, module, cr): name = getattr(cls, '_name', cls._inherit) | def createInstance(cls, pool, module, cr): | def createInstance(cls, pool, module, cr): name = getattr(cls, '_name', cls._inherit) parent_names = getattr(cls, '_inherit', None) if parent_names: for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]): parent_class = pool.get(parent_name).__class__ assert pool.get(parent_name), "parent class %s does not exist in module %s !" % (parent_name, module) nattr = {} for s in ('_columns', '_defaults'): new = copy.copy(getattr(pool.get(parent_name), s)) if hasattr(new, 'update'): new.update(cls.__dict__.get(s, {})) else: new.extend(cls.__dict__.get(s, [])) nattr[s] = new name = getattr(cls, '_name', cls._inherit) cls = type(name, (cls, parent_class), nattr) | a0be2f8e46a52320096255092dbceb06a4ee555e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a0be2f8e46a52320096255092dbceb06a4ee555e/osv.py |
parent_names = getattr(cls, '_inherit', None) | parent_names = getattr(cls, '_inherit', None) | def createInstance(cls, pool, module, cr): parent_names = getattr(cls, '_inherit', None) if parent_names: for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]): parent_class = pool.get(parent_name).__class__ assert pool.get(parent_name), "parent class %s does not exist in module %s !" % (parent_name, module) nattr = {} for s in ('_columns', '_defaults', '_inherits', '_constraints', '_sql_constraints'): new = copy.copy(getattr(pool.get(parent_name), s)) if hasattr(new, 'update'): new.update(cls.__dict__.get(s, {})) else: if s=='_constraints': for c in cls.__dict__.get(s, []): exist = False for c2 in range(len(new)): if new[c2][2]==c[2]: new[c2] = c exist = True break if not exist: new.append(c) else: new.extend(cls.__dict__.get(s, [])) nattr[s] = new name = getattr(cls, '_name', cls._inherit) cls = type(name, (cls, parent_class), nattr) obj = object.__new__(cls) obj.__init__(pool, cr) return obj | a0be2f8e46a52320096255092dbceb06a4ee555e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a0be2f8e46a52320096255092dbceb06a4ee555e/osv.py |
nattr[s] = new name = getattr(cls, '_name', cls._inherit) | nattr[s] = new name = hasattr(cls, '_name') and cls._name or cls._inherit | def createInstance(cls, pool, module, cr): parent_names = getattr(cls, '_inherit', None) if parent_names: for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]): parent_class = pool.get(parent_name).__class__ assert pool.get(parent_name), "parent class %s does not exist in module %s !" % (parent_name, module) nattr = {} for s in ('_columns', '_defaults', '_inherits', '_constraints', '_sql_constraints'): new = copy.copy(getattr(pool.get(parent_name), s)) if hasattr(new, 'update'): new.update(cls.__dict__.get(s, {})) else: if s=='_constraints': for c in cls.__dict__.get(s, []): exist = False for c2 in range(len(new)): if new[c2][2]==c[2]: new[c2] = c exist = True break if not exist: new.append(c) else: new.extend(cls.__dict__.get(s, [])) nattr[s] = new name = getattr(cls, '_name', cls._inherit) cls = type(name, (cls, parent_class), nattr) obj = object.__new__(cls) obj.__init__(pool, cr) return obj | a0be2f8e46a52320096255092dbceb06a4ee555e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a0be2f8e46a52320096255092dbceb06a4ee555e/osv.py |
body=None | body=self._get_body() if self._l_isLocked(uri): return self.send_body(None, '423', 'Locked', 'Locked') ct=None if self.headers.has_key("Content-Type"): ct=self.headers['Content-Type'] try: location = dc.put(uri,body,ct) except DAV_Error, (ec,dd): return self.send_status(ec) headers = {} if location: headers['Location'] = location try: etag = dc.get_prop(location or uri, "DAV:", "getetag") headers['ETag'] = etag except: pass self.send_body(None, '201', 'Created', '', headers=headers) def _get_body(self): body = None | def do_PUT(self): dc=self.IFACE_CLASS uri=urlparse.urljoin(self.get_baseuri(dc), self.path) uri=urllib.unquote(uri) # Handle If-Match if self.headers.has_key('If-Match'): test = False etag = None for match in self.headers['If-Match'].split(','): if match == '*': if dc.exists(uri): test = True break else: if dc.match_prop(uri, match, "DAV:", "getetag"): test = True break if not test: self.send_status(412) return | c02cd022e4531ac05001ca7909fbd037cb4d4627 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c02cd022e4531ac05001ca7909fbd037cb4d4627/webdav_server.py |
if self._l_isLocked(uri): return self.send_body(None, '423', 'Locked', 'Locked') ct=None if self.headers.has_key("Content-Type"): ct=self.headers['Content-Type'] try: location = dc.put(uri,body,ct) except DAV_Error, (ec,dd): return self.send_status(ec) headers = {} if location: headers['Location'] = location try: etag = dc.get_prop(location or uri, "DAV:", "getetag") headers['ETag'] = etag except: pass self.send_body(None, '201', 'Created', '', headers=headers) | return body | def do_PUT(self): dc=self.IFACE_CLASS uri=urlparse.urljoin(self.get_baseuri(dc), self.path) uri=urllib.unquote(uri) # Handle If-Match if self.headers.has_key('If-Match'): test = False etag = None for match in self.headers['If-Match'].split(','): if match == '*': if dc.exists(uri): test = True break else: if dc.match_prop(uri, match, "DAV:", "getetag"): test = True break if not test: self.send_status(412) return | c02cd022e4531ac05001ca7909fbd037cb4d4627 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/c02cd022e4531ac05001ca7909fbd037cb4d4627/webdav_server.py |
raise osv.except_osv(_('Error !'), _('You can not duplicate a timesheet !')) | raise osv.except_osv(_('Error !'), _('You cannot duplicate a timesheet !')) | def copy(self, cr, uid, ids, *args, **argv): raise osv.except_osv(_('Error !'), _('You can not duplicate a timesheet !')) | 53411e06867a5f971962c0e95fe9aa102f760efe /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/53411e06867a5f971962c0e95fe9aa102f760efe/hr_timesheet_sheet.py |
if not res: | if context.get('bank_statement', False) and not res: | def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): res = super(account_coda, self).search(cr, user, args=args, offset=offset, limit=limit, order=order, context=context, count=count) if not res: raise osv.except_osv('Error', _('Coda file not found for bank statement !!')) return res | b7eb35f51588db1a1c410a5727bf5ef2c29a0d4a /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b7eb35f51588db1a1c410a5727bf5ef2c29a0d4a/account_coda.py |
'web_icon': fields.char('Icon File', size=128), 'web_icon_hover':fields.char('Icon Over File', size=128), 'web_icon_data': fields.function(_get_image_icon, string='Web Icons', type='binary', method=True, readonly=True, store=True, multi='icon'), 'web_icon_hover_data':fields.function(_get_image_icon, string='Web Icons Over', type='binary', method=True, readonly=True, store=True,multi='icon'), | 'web_icon': fields.char('Web Icon File', size=128), 'web_icon_hover':fields.char('Web Icon File (hover)', size=128), 'web_icon_data': fields.function(_get_image_icon, string='Web Icon Image', type='binary', method=True, readonly=True, store=True, multi='icon'), 'web_icon_hover_data':fields.function(_get_image_icon, string='Web Icon Image (hover)', type='binary', method=True, readonly=True, store=True, multi='icon'), | def _get_image_icon(self, cr, uid, ids, name, args, context=None): res = {} for menu in self.browse(cr, uid, ids, context=context): res[menu.id] = { 'web_icon_data': False, 'web_icon_hover_data': False, } if not menu.parent_id: if menu.web_icon_hover: res[menu.id]['web_icon_hover_data'] = self.read_image(menu.web_icon_hover) if menu.web_icon: res[menu.id]['web_icon_data'] = self.read_image(menu.web_icon) return res | 0e00f5c5a6811d50110d99e325cfa0d1aff4c2a9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0e00f5c5a6811d50110d99e325cfa0d1aff4c2a9/ir_ui_menu.py |
except: self.threads.remove(self) self.running = False return False try: | def run(self): self.running = True try: ts = tiny_socket.mysocket(self.sock) except: self.threads.remove(self) self.running = False return False while self.running: try: msg = ts.myreceive() except: self.threads.remove(self) self.running = False return False try: result = self.dispatch(msg[0], msg[1], msg[2:]) ts.mysend(result) except netsvc.OpenERPDispatcherException, e: try: new_e = Exception(tools.exception_to_unicode(e.exception)) # avoid problems of pickeling ts.mysend(new_e, exception=True, traceback=e.traceback) except: self.running = False break except Exception, e: # this code should not be reachable, therefore we warn netsvc.Logger().notifyChannel("net-rpc", netsvc.LOG_WARNING, "exception: %s" % str(e)) break | 983ace0a078cbb2228f8bfac05ad9e6ab6389dd8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/983ace0a078cbb2228f8bfac05ad9e6ab6389dd8/netrpc_server.py |
|
except: self.running = False | except Exception: logging.getLogger('web-services').exception("netrpc: cannot deliver exception message to client") | def run(self): self.running = True try: ts = tiny_socket.mysocket(self.sock) except: self.threads.remove(self) self.running = False return False while self.running: try: msg = ts.myreceive() except: self.threads.remove(self) self.running = False return False try: result = self.dispatch(msg[0], msg[1], msg[2:]) ts.mysend(result) except netsvc.OpenERPDispatcherException, e: try: new_e = Exception(tools.exception_to_unicode(e.exception)) # avoid problems of pickeling ts.mysend(new_e, exception=True, traceback=e.traceback) except: self.running = False break except Exception, e: # this code should not be reachable, therefore we warn netsvc.Logger().notifyChannel("net-rpc", netsvc.LOG_WARNING, "exception: %s" % str(e)) break | 983ace0a078cbb2228f8bfac05ad9e6ab6389dd8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/983ace0a078cbb2228f8bfac05ad9e6ab6389dd8/netrpc_server.py |
netsvc.Logger().notifyChannel("net-rpc", netsvc.LOG_WARNING, "exception: %s" % str(e)) break | try: tb = getattr(e, 'traceback', sys.exc_info()) tb_s = "".join(traceback.format_exception(*tb)) logging.getLogger('web-services').debug("netrpc: communication-level exception", exc_info=True) ts.mysend(e, exception=True, traceback=tb_s) except Exception, ex: logging.getLogger('web-services').exception("netrpc: cannot deliver exception message to client") break | def run(self): self.running = True try: ts = tiny_socket.mysocket(self.sock) except: self.threads.remove(self) self.running = False return False while self.running: try: msg = ts.myreceive() except: self.threads.remove(self) self.running = False return False try: result = self.dispatch(msg[0], msg[1], msg[2:]) ts.mysend(result) except netsvc.OpenERPDispatcherException, e: try: new_e = Exception(tools.exception_to_unicode(e.exception)) # avoid problems of pickeling ts.mysend(new_e, exception=True, traceback=e.traceback) except: self.running = False break except Exception, e: # this code should not be reachable, therefore we warn netsvc.Logger().notifyChannel("net-rpc", netsvc.LOG_WARNING, "exception: %s" % str(e)) break | 983ace0a078cbb2228f8bfac05ad9e6ab6389dd8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/983ace0a078cbb2228f8bfac05ad9e6ab6389dd8/netrpc_server.py |
import logging | def run(self): try: self.running = True while self.running: timeout = self.socket.gettimeout() fd_sets = select.select([self.socket], [], [], timeout) if not fd_sets[0]: continue (clientsocket, address) = self.socket.accept() ct = TinySocketClientThread(clientsocket, self.threads) clientsocket = None self.threads.append(ct) ct.start() lt = len(self.threads) if (lt > 10) and (lt % 10 == 0): # Not many threads should be serving at the same time, so log # their abuse. netsvc.Logger().notifyChannel("web-services", netsvc.LOG_DEBUG, "Netrpc: %d threads" % len(self.threads)) self.socket.close() except Exception, e: import logging logging.getLogger('web-services').warning("Netrpc: closing because of exception %s" % str(e)) self.socket.close() return False | 983ace0a078cbb2228f8bfac05ad9e6ab6389dd8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/983ace0a078cbb2228f8bfac05ad9e6ab6389dd8/netrpc_server.py |
|
childs = self.search(cr, uid, [('parent_id', '=', parent_id), ('active', 'in', [True, False])]) | childs = self.search(cr, uid, [('parent_id', '=', parent_id)]) | def on_change_parent(self, cr, uid, id, parent_id): if not parent_id: return {} parent = self.read(cr, uid, [parent_id], ['partner_id','code'])[0] childs = self.search(cr, uid, [('parent_id', '=', parent_id), ('active', 'in', [True, False])]) numchild = len(childs) if parent['partner_id']: partner = parent['partner_id'][0] else: partner = False res = {'value' : {'code' : '%s - %03d' % (parent['code'] or '', numchild + 1),}} if partner: res['value']['partner_id'] = partner return res | d96501aa74af87522161d122ef0096a48c8fd889 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d96501aa74af87522161d122ef0096a48c8fd889/project.py |
if picking.sale_id and picking.sale_id.note: if picking.note: return picking.note + '\n' + picking.sale_id.note else: return picking.sale_id.note | if picking.note or (picking.sale_id and picking.sale_id.note): return picking.note or picking.sale_id.note | def _get_comment_invoice(self, cursor, user, picking): if picking.sale_id and picking.sale_id.note: if picking.note: return picking.note + '\n' + picking.sale_id.note else: return picking.sale_id.note return super(stock_picking, self)._get_comment_invoice(cursor, user, picking) | 28a60e9495921f73cc8a391a9abec4344a24711e /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/28a60e9495921f73cc8a391a9abec4344a24711e/stock.py |
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): res = super(account_bs_report, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False) doc = etree.XML(res['arch']) nodes = doc.xpath("//field[@name='journal_ids']") for node in nodes: node.set('readonly', '1') node.set('required', '0') res['arch'] = etree.tostring(doc) return res | def onchange_chart_id(self, cr, uid, chart_id, context=None): if not chart_id: return False account = self.pool.get('account.account').browse(cr, uid, chart_id , context=context) if not account.company_id.property_reserve_and_surplus_account: return False return { 'value': {'reserve_account_id': account.company_id.property_reserve_and_surplus_account.id}} | def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): res = super(account_bs_report, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False) doc = etree.XML(res['arch']) nodes = doc.xpath("//field[@name='journal_ids']") for node in nodes: node.set('readonly', '1') node.set('required', '0') res['arch'] = etree.tostring(doc) return res | 944a8f501efbac9eee2f57c6542a17efd8488b07 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/944a8f501efbac9eee2f57c6542a17efd8488b07/account_report_balance_sheet.py |
account = self.pool.get('account.account').browse(cr, uid, data['form']['chart_account_id'], context=context) if not account.company_id.property_reserve_and_surplus_account: raise osv.except_osv(_('Warning'),_('Please define the Reserve and Profit/Loss account for current user company !')) data['form']['reserve_account_id'] = account.company_id.property_reserve_and_surplus_account.id data['form'].update(self.read(cr, uid, ids, ['display_type'])[0]) | def _print_report(self, cr, uid, ids, data, context=None): if context is None: context = {} data = self.pre_print_report(cr, uid, ids, data, context=context) account = self.pool.get('account.account').browse(cr, uid, data['form']['chart_account_id'], context=context) if not account.company_id.property_reserve_and_surplus_account: raise osv.except_osv(_('Warning'),_('Please define the Reserve and Profit/Loss account for current user company !')) data['form']['reserve_account_id'] = account.company_id.property_reserve_and_surplus_account.id data['form'].update(self.read(cr, uid, ids, ['display_type'])[0]) if data['form']['display_type']: return { 'type': 'ir.actions.report.xml', 'report_name': 'account.balancesheet.horizontal', 'datas': data, } else: return { 'type': 'ir.actions.report.xml', 'report_name': 'account.balancesheet', 'datas': data, } | 944a8f501efbac9eee2f57c6542a17efd8488b07 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/944a8f501efbac9eee2f57c6542a17efd8488b07/account_report_balance_sheet.py |
|
def _print_report(self, cr, uid, ids, data, context=None): if context is None: context = {} data = self.pre_print_report(cr, uid, ids, data, context=context) account = self.pool.get('account.account').browse(cr, uid, data['form']['chart_account_id'], context=context) if not account.company_id.property_reserve_and_surplus_account: raise osv.except_osv(_('Warning'),_('Please define the Reserve and Profit/Loss account for current user company !')) data['form']['reserve_account_id'] = account.company_id.property_reserve_and_surplus_account.id data['form'].update(self.read(cr, uid, ids, ['display_type'])[0]) if data['form']['display_type']: return { 'type': 'ir.actions.report.xml', 'report_name': 'account.balancesheet.horizontal', 'datas': data, } else: return { 'type': 'ir.actions.report.xml', 'report_name': 'account.balancesheet', 'datas': data, } | 944a8f501efbac9eee2f57c6542a17efd8488b07 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/944a8f501efbac9eee2f57c6542a17efd8488b07/account_report_balance_sheet.py |
except: logger.warn('translation went wrong for string %s', repr(source)) | except Exception: logger.debug('translation went wrong for string %s', repr(source)) | def __call__(self, source): is_new_cr = False res = source try: frame = inspect.stack()[1][0] cr, is_new_cr = self._get_cr(frame) lang = self._get_lang(frame) if lang and cr: cr.execute('SELECT value FROM ir_translation WHERE lang=%s AND type IN (%s, %s) AND src=%s', (lang, 'code','sql_constraint', source)) res_trans = cr.fetchone() res = res_trans and res_trans[0] or source except: logger.warn('translation went wrong for string %s', repr(source)) finally: if is_new_cr: cr.close() return res | 3af1a322d4a246a20df58bb9541b1671231e31ab /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3af1a322d4a246a20df58bb9541b1671231e31ab/translate.py |
if hasattr(self.pool.get(model), name): | proxy = self.pool.get(model) if hasattr(proxy, name): attribute = getattr(proxy, name) if not hasattr(attribute, '__call__'): return attribute | def __getattr__(self, name): for model, field in self._inherits.iteritems(): if hasattr(self.pool.get(model), name): break else: return super(orm, self).__getattr__(name) | 3aca2aea6be0a69e44f2c91121c40eb244b7b047 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3aca2aea6be0a69e44f2c91121c40eb244b7b047/orm.py |
proxy = self.pool.get(model) | def _proxy(cr, uid, ids, *args, **kwargs): objects = self.browse(cr, uid, ids, kwargs.get('context', None)) lst = [obj[field].id for obj in objects if obj[field]] proxy = self.pool.get(model) return getattr(proxy, name)(cr, uid, lst, *args, **kwargs) | 3aca2aea6be0a69e44f2c91121c40eb244b7b047 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3aca2aea6be0a69e44f2c91121c40eb244b7b047/orm.py |
|
def _ellipsis(self, orig_str, maxlen=100, ellipsis='...'): maxlen = maxlen - len(ellipsis) if maxlen <= 0: maxlen = 1 new_str = orig_str[:maxlen] return new_str | def _ellipsis(self, char, size=100, truncation_str='...'): if len(char) <= size: return char return char[:size-len(truncation_str)] + truncation_str | def _ellipsis(self, orig_str, maxlen=100, ellipsis='...'): maxlen = maxlen - len(ellipsis) if maxlen <= 0: maxlen = 1 new_str = orig_str[:maxlen] return new_str | 4c8a58015ae2edfc87e5e65dce1b651a21e26f1d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4c8a58015ae2edfc87e5e65dce1b651a21e26f1d/account_general_ledger.py |
return self._ellipsis(name, maxlen, ' ...') | return self._ellipsis(name, maxlen) | def _strip_name(self, name, maxlen=50): return self._ellipsis(name, maxlen, ' ...') | 4c8a58015ae2edfc87e5e65dce1b651a21e26f1d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4c8a58015ae2edfc87e5e65dce1b651a21e26f1d/account_general_ledger.py |
def _get_sortby(self, data): if self.sortby == 'sort_date': return 'Date' elif self.sortby == 'sort_journal_partner': return 'Journal & Partner' return 'Date' | 4c8a58015ae2edfc87e5e65dce1b651a21e26f1d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4c8a58015ae2edfc87e5e65dce1b651a21e26f1d/account_general_ledger.py |
||
return {} | return res res = {'account_id':False} | def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'): if not partner_id: return {} partner_pool = self.pool.get('res.partner') res = {'account_id':False} if pay_now == 'pay_later': partner = partner_pool.browse(cr, uid, partner_id) if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } | 59f2e4dcbd39996b551f9f9cfa759a367b7ca48c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/59f2e4dcbd39996b551f9f9cfa759a367b7ca48c/voucher.py |
res = {'account_id':False} | journal_pool = self.pool.get('account.journal') | def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'): if not partner_id: return {} partner_pool = self.pool.get('res.partner') res = {'account_id':False} if pay_now == 'pay_later': partner = partner_pool.browse(cr, uid, partner_id) if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } | 59f2e4dcbd39996b551f9f9cfa759a367b7ca48c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/59f2e4dcbd39996b551f9f9cfa759a367b7ca48c/voucher.py |
if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } | journal = journal_pool.browse(cr, uid, journal_id) if journal.type in ('sale','sale_refund'): account_id = partner.property_account_receivable.id elif journal.type in ('purchase', 'purchase_refund','expense'): account_id = partner.property_account_payable.id else: account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id res['account_id'] = account_id return {'value':res} | def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'): if not partner_id: return {} partner_pool = self.pool.get('res.partner') res = {'account_id':False} if pay_now == 'pay_later': partner = partner_pool.browse(cr, uid, partner_id) if ttype == 'sale': res.update({ 'account_id':partner.property_account_receivable.id, }) elif ttype == 'purchase': res.update({ 'account_id':partner.property_account_payable.id, }) return { 'value':res } | 59f2e4dcbd39996b551f9f9cfa759a367b7ca48c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/59f2e4dcbd39996b551f9f9cfa759a367b7ca48c/voucher.py |
netsvc.Logger().notifyChannel( "web-services", netsvc.LOG_INFO, "starting HTTPS service at %s port %d" % (interface or '0.0.0.0', port,)) | def __init__(self, interface, port, handler): threading.Thread.__init__(self) netsvc.Server.__init__(self) self.__port = port self.__interface = interface | 6747bdc080d1b960f8c0863fa98de14ecf3e22f3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/6747bdc080d1b960f8c0863fa98de14ecf3e22f3/http_server.py |
|
'no_of_employee': fields.integer('No of Employees', help='Number of employee there are already in the department', readonly=True), 'no_of_recruitment': fields.integer('No of Recruitment'), | 'no_of_employee': fields.integer('No of Employees', help='Number of employee there are already in the department'), 'no_of_recruitment': fields.integer('No of Recruitment', readonly=True), | def _check_recursion(self, cr, uid, ids, context=None): level = 100 while len(ids): cr.execute('select distinct parent_id from hr_employee_category where id IN %s', (tuple(ids), )) ids = filter(None, map(lambda x:x[0], cr.fetchall())) if not level: return False level -= 1 return True | bef860db74f516356dbb4f701e076b35f930f8bd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/bef860db74f516356dbb4f701e076b35f930f8bd/hr.py |
def on_change_expected_employee(self, cr, uid, ids, expected_employee, context=None): | def on_change_expected_employee(self, cr, uid, ids, expected_employee, no_of_employee, context=None): | def on_change_expected_employee(self, cr, uid, ids, expected_employee, context=None): if context is None: context = {} result={} if expected_employee: xx = self.browse(cr, uid, ids, context)[0] result['no_of_recruitment'] = expected_employee - xx['no_of_employee'] return {'value': result} | bef860db74f516356dbb4f701e076b35f930f8bd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/bef860db74f516356dbb4f701e076b35f930f8bd/hr.py |
xx = self.browse(cr, uid, ids, context)[0] result['no_of_recruitment'] = expected_employee - xx['no_of_employee'] | result['no_of_recruitment'] = expected_employee - no_of_employee | def on_change_expected_employee(self, cr, uid, ids, expected_employee, context=None): if context is None: context = {} result={} if expected_employee: xx = self.browse(cr, uid, ids, context)[0] result['no_of_recruitment'] = expected_employee - xx['no_of_employee'] return {'value': result} | bef860db74f516356dbb4f701e076b35f930f8bd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/bef860db74f516356dbb4f701e076b35f930f8bd/hr.py |
ids_dept = obj_dept.search(cr, uid, [('member_ids', 'in', [user_id])], context=context) | emp_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', user_id)]) cr.execute('SELECT emp.department_id FROM hr_employee AS emp JOIN resource_resource AS res ON res.id = emp.resource_id \ WHERE res.user_id = %s AND emp.department_id IS NOT NULL', (user_id,)) ids_dept = [x[0] for x in cr.fetchall()] | def _parent_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} result = {} obj_dept = self.pool.get('hr.department') for user_id in ids: ids_dept = obj_dept.search(cr, uid, [('member_ids', 'in', [user_id])], context=context) parent_ids = [] if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['manager_id'], context=context) parent_ids = map(lambda x: x['manager_id'][0], data_dept) result[user_id] = parent_ids return result | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
return [('id', 'in', child_ids.get(uid,[]))] | return [('id', 'in', child_ids.get(uid, []))] | def _parent_search(self, cr, uid, obj, name, args, context=None): if context is None: context = {} parent = [] for arg in args: if arg[0] == 'parent_id': parent = arg[2] child_ids = self._child_compute(cr, uid, parent, name, args, context=context) if not child_ids: return [('id', 'in', [0])] return [('id', 'in', child_ids.get(uid,[]))] | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
for manager_id in ids: | for user_id in ids: | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) | cr.execute('SELECT dept.id FROM hr_department AS dept \ LEFT JOIN hr_employee AS emp ON dept.manager_id = emp.id \ WHERE emp.id IN \ (SELECT emp.id FROM hr_employee \ JOIN resource_resource r ON r.id = emp.resource_id WHERE r.user_id=' + str(user_id) + ') ') mgnt_dept_ids = [x[0] for x in cr.fetchall()] | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
if manager_id in childs: childs.remove(manager_id) | if user_id in childs: childs.remove(user_id) | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids | child_ids = set.keys() result[user_id] = child_ids | def _child_compute(self, cr, uid, ids, name, args, context=None): if context is None: context = {} obj_dept = self.pool.get('hr.department') obj_user = self.pool.get('res.users') result = {} for manager_id in ids: child_ids = [] mgnt_dept_ids = obj_dept.search(cr, uid, [('manager_id', '=', manager_id)], context=context) ids_dept = obj_dept.search(cr, uid, [('id', 'child_of', mgnt_dept_ids)], context=context) if ids_dept: data_dept = obj_dept.read(cr, uid, ids_dept, ['member_ids'], context=context) childs = map(lambda x: x['member_ids'], data_dept) childs = tools.flatten(childs) childs = obj_user.search(cr, uid, [('id', 'in', childs),('active', '=', True)], context=context) if manager_id in childs: childs.remove(manager_id) child_ids.extend(tools.flatten(childs)) set = {} map(set.__setitem__, child_ids, []) child_ids = set.keys() else: child_ids = [] result[manager_id] = child_ids return result | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
return [('id', 'in', child_ids.get(uid,[]))] | return [('id', 'in', child_ids.get(uid, []))] | def _child_search(self, cr, uid, obj, name, args, context=None): if context is None: context = {} parent = [] for arg in args: if arg[0] == 'child_ids': parent = arg[2] child_ids = self._child_compute(cr, uid, parent, name, args, context=context) if not child_ids: return [('id', 'in', [0])] return [('id', 'in', child_ids.get(uid,[]))] | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
def _child_search(self, cr, uid, obj, name, args, context=None): if context is None: context = {} parent = [] for arg in args: if arg[0] == 'child_ids': parent = arg[2] child_ids = self._child_compute(cr, uid, parent, name, args, context=context) if not child_ids: return [('id', 'in', [0])] return [('id', 'in', child_ids.get(uid,[]))] | 096ec13381cfb6869abdf9d9e6d69469c89d8678 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/096ec13381cfb6869abdf9d9e6d69469c89d8678/hr_department.py |
||
fnode.set_data(cr, data, fil) | if data is not None: fnode.set_data(cr, data, fil) | def create_child(self, cr, path, data): """ API function to create a child file object and node Return the node_* created """ dirobj = self.context._dirobj uid = self.context.uid ctx = self.context.context.copy() ctx.update(self.dctx) fil_obj=dirobj.pool.get('ir.attachment') val = { 'name': path, 'datas_fname': path, 'parent_id': self.dir_id, # Datas are not set here } | df40ff60dcabc12c3d806ce6715097f4290087a1 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/df40ff60dcabc12c3d806ce6715097f4290087a1/nodes.py |
'company': user.company_id, | 'setCompany': self.setCompany, | def __init__(self, cr, uid, name, parents=rml_parents, tag=rml_tag, context=None): if not context: context={} self.cr = cr self.uid = uid self.pool = pooler.get_pool(cr.dbname) user = self.pool.get('res.users').browse(cr, uid, uid, context=context) self.localcontext = { 'user': user, 'company': user.company_id, 'repeatIn': self.repeatIn, 'setLang': self.setLang, 'setTag': self.setTag, 'removeParentNode': self.removeParentNode, 'format': self.format, 'formatLang': self.formatLang, 'logo' : user.company_id.logo, 'lang' : user.company_id.partner_id.lang, 'translate' : self._translate, 'setHtmlImage' : self.set_html_image, 'strip_name' : self._strip_name, 'time' : time } self.localcontext.update(context) self.rml_header = user.company_id.rml_header self.rml_header2 = user.company_id.rml_header2 self.rml_header3 = user.company_id.rml_header3 self.logo = user.company_id.logo self.name = name self._node = None self.parents = parents self.tag = tag self._lang_cache = {} self.lang_dict = {} self.default_lang = {} self.lang_dict_called = False self._transl_regex = re.compile('(\[\[.+?\]\])') | 8233e8b505ef64ce98d82ac439427872302f0be6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/8233e8b505ef64ce98d82ac439427872302f0be6/report_sxw.py |
'logo' : user.company_id.logo, | def __init__(self, cr, uid, name, parents=rml_parents, tag=rml_tag, context=None): if not context: context={} self.cr = cr self.uid = uid self.pool = pooler.get_pool(cr.dbname) user = self.pool.get('res.users').browse(cr, uid, uid, context=context) self.localcontext = { 'user': user, 'company': user.company_id, 'repeatIn': self.repeatIn, 'setLang': self.setLang, 'setTag': self.setTag, 'removeParentNode': self.removeParentNode, 'format': self.format, 'formatLang': self.formatLang, 'logo' : user.company_id.logo, 'lang' : user.company_id.partner_id.lang, 'translate' : self._translate, 'setHtmlImage' : self.set_html_image, 'strip_name' : self._strip_name, 'time' : time } self.localcontext.update(context) self.rml_header = user.company_id.rml_header self.rml_header2 = user.company_id.rml_header2 self.rml_header3 = user.company_id.rml_header3 self.logo = user.company_id.logo self.name = name self._node = None self.parents = parents self.tag = tag self._lang_cache = {} self.lang_dict = {} self.default_lang = {} self.lang_dict_called = False self._transl_regex = re.compile('(\[\[.+?\]\])') | 8233e8b505ef64ce98d82ac439427872302f0be6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/8233e8b505ef64ce98d82ac439427872302f0be6/report_sxw.py |
|
'time' : time | 'time' : time, | def __init__(self, cr, uid, name, parents=rml_parents, tag=rml_tag, context=None): if not context: context={} self.cr = cr self.uid = uid self.pool = pooler.get_pool(cr.dbname) user = self.pool.get('res.users').browse(cr, uid, uid, context=context) self.localcontext = { 'user': user, 'company': user.company_id, 'repeatIn': self.repeatIn, 'setLang': self.setLang, 'setTag': self.setTag, 'removeParentNode': self.removeParentNode, 'format': self.format, 'formatLang': self.formatLang, 'logo' : user.company_id.logo, 'lang' : user.company_id.partner_id.lang, 'translate' : self._translate, 'setHtmlImage' : self.set_html_image, 'strip_name' : self._strip_name, 'time' : time } self.localcontext.update(context) self.rml_header = user.company_id.rml_header self.rml_header2 = user.company_id.rml_header2 self.rml_header3 = user.company_id.rml_header3 self.logo = user.company_id.logo self.name = name self._node = None self.parents = parents self.tag = tag self._lang_cache = {} self.lang_dict = {} self.default_lang = {} self.lang_dict_called = False self._transl_regex = re.compile('(\[\[.+?\]\])') | 8233e8b505ef64ce98d82ac439427872302f0be6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/8233e8b505ef64ce98d82ac439427872302f0be6/report_sxw.py |
self.rml_header = user.company_id.rml_header self.rml_header2 = user.company_id.rml_header2 self.rml_header3 = user.company_id.rml_header3 self.logo = user.company_id.logo | def __init__(self, cr, uid, name, parents=rml_parents, tag=rml_tag, context=None): if not context: context={} self.cr = cr self.uid = uid self.pool = pooler.get_pool(cr.dbname) user = self.pool.get('res.users').browse(cr, uid, uid, context=context) self.localcontext = { 'user': user, 'company': user.company_id, 'repeatIn': self.repeatIn, 'setLang': self.setLang, 'setTag': self.setTag, 'removeParentNode': self.removeParentNode, 'format': self.format, 'formatLang': self.formatLang, 'logo' : user.company_id.logo, 'lang' : user.company_id.partner_id.lang, 'translate' : self._translate, 'setHtmlImage' : self.set_html_image, 'strip_name' : self._strip_name, 'time' : time } self.localcontext.update(context) self.rml_header = user.company_id.rml_header self.rml_header2 = user.company_id.rml_header2 self.rml_header3 = user.company_id.rml_header3 self.logo = user.company_id.logo self.name = name self._node = None self.parents = parents self.tag = tag self._lang_cache = {} self.lang_dict = {} self.default_lang = {} self.lang_dict_called = False self._transl_regex = re.compile('(\[\[.+?\]\])') | 8233e8b505ef64ce98d82ac439427872302f0be6 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/8233e8b505ef64ce98d82ac439427872302f0be6/report_sxw.py |
|
'vtimezone': fields.related('user_id', 'context_tz', type='char', size=24, \ string='Timezone', store=True), | 'vtimezone': fields.selection(_tz_get, size=64, string='Timezone'), | def _get_rulestring(self, cr, uid, ids, name, arg, context=None): """ Gets Recurrence rule string according to value type RECUR of iCalendar from the values given. @param self: The object pointer @param cr: the current row, from the database cursor, @param id: List of calendar event's ids. @param context: A standard dictionary for contextual values @return: dictionary of rrule value. """ result = {} for datas in self.read(cr, uid, ids, context=context): event = datas['id'] if datas.get('rrule_type'): if datas.get('rrule_type') == 'none': result[event] = False cr.execute("UPDATE %s set exrule=Null where id=%s" % (self._table, event)) elif datas.get('rrule_type') == 'custom': if datas.get('interval', 0) < 0: raise osv.except_osv('Warning!', 'Interval can not be Negative') if datas.get('count', 0) < 0: raise osv.except_osv('Warning!', 'Count can not be Negative') rrule_custom = self.compute_rule_string(cr, uid, datas, \ context=context) result[event] = rrule_custom else: result[event] = self.compute_rule_string(cr, uid, {'freq': datas.get('rrule_type').upper(), 'interval': 1}, context=context) | 3620ab450f747e50e0df9a763eadea00a88a7c0c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3620ab450f747e50e0df9a763eadea00a88a7c0c/base_calendar.py |
domain=eval(domain) | def ref(str_id): return self.id_get(cr, None, str_id) | 29aff8f992509ed047c649a1fcb6428f78bf965d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/29aff8f992509ed047c649a1fcb6428f78bf965d/convert.py |
|
value = {} | def action_apply(self, cr, uid, ids, context=None): """ This converts lead to opportunity and opens Opportunity view @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Lead to Opportunity IDs @param context: A standard dictionary for contextual values | 9fb8e1684b4bf5c55aed095358b6e1510d6a195c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/9fb8e1684b4bf5c55aed095358b6e1510d6a195c/crm_lead_to_opportunity.py |
|
if record_id: lead_obj = self.pool.get('crm.lead') opp_obj = self. pool.get('crm.opportunity') data_obj = self.pool.get('ir.model.data') history_obj = self.pool.get('crm.case.history') model_obj = self.pool.get('ir.model') result = data_obj._get_id(cr, uid, 'crm', 'view_crm_case_opportunities_filter') res = data_obj.read(cr, uid, result, ['res_id']) id2 = data_obj._get_id(cr, uid, 'crm', 'crm_case_form_view_oppor') id3 = data_obj._get_id(cr, uid, 'crm', 'crm_case_tree_view_oppor') if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id if id3: id3 = data_obj.browse(cr, uid, id3, context=context).res_id lead = lead_obj.browse(cr, uid, record_id, context=context) model_ids = model_obj.search(cr, uid, [('model', '=', 'crm.opportunity')]) for this in self.browse(cr, uid, ids, context=context): new_opportunity_id = opp_obj.create(cr, uid, { 'name': this.name, 'referred': this.referred, 'planned_revenue': this.planned_revenue, 'probability': this.probability, 'partner_id': lead.partner_id and lead.partner_id.id or False , 'section_id': lead.section_id and lead.section_id.id or False, 'description': lead.description or False, 'date_deadline': lead.date_deadline or False, 'partner_address_id': lead.partner_address_id and \ lead.partner_address_id.id or False , 'priority': lead.priority, 'phone': lead.phone, 'email_from': lead.email_from }) new_opportunity = opp_obj.browse(cr, uid, new_opportunity_id) vals = { 'partner_id': this.partner_id and this.partner_id.id or False, } if not lead.opportunity_id: vals.update({'opportunity_id' : new_opportunity.id}) lead_obj.write(cr, uid, [lead.id], vals) lead_obj.case_close(cr, uid, [lead.id]) for his_id in lead.history_line: history_ids = history_obj.copy(cr, uid, his_id.id, \ {'model_id': model_ids[0], \ 'res_id': new_opportunity_id}) opp_obj.case_open(cr, uid, [new_opportunity_id]) value = { 'name': _('Opportunity'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'crm.opportunity', 'res_id': int(new_opportunity_id), 'view_id': False, 'views': [(id2, 'form'), (id3, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'] } | if not record_id: return {} lead_obj = self.pool.get('crm.lead') opp_obj = self. 
pool.get('crm.opportunity') data_obj = self.pool.get('ir.model.data') history_obj = self.pool.get('crm.case.history') model_obj = self.pool.get('ir.model') result = data_obj._get_id(cr, uid, 'crm', 'view_crm_case_opportunities_filter') res = data_obj.read(cr, uid, result, ['res_id']) id2 = data_obj._get_id(cr, uid, 'crm', 'crm_case_form_view_oppor') id3 = data_obj._get_id(cr, uid, 'crm', 'crm_case_tree_view_oppor') if id2: id2 = data_obj.browse(cr, uid, id2, context=context).res_id if id3: id3 = data_obj.browse(cr, uid, id3, context=context).res_id lead = lead_obj.browse(cr, uid, record_id, context=context) model_ids = model_obj.search(cr, uid, [('model', '=', 'crm.opportunity')]) for this in self.browse(cr, uid, ids, context=context): new_opportunity_id = opp_obj.create(cr, uid, { 'name': this.name, 'referred': lead.referred, 'planned_revenue': this.planned_revenue, 'probability': this.probability, 'partner_id': lead.partner_id and lead.partner_id.id or False, 'section_id': lead.section_id and lead.section_id.id or False, 'description': lead.description or False, 'date_deadline': lead.date_deadline or False, 'partner_address_id': lead.partner_address_id and lead.partner_address_id.id or False , 'priority': lead.priority, 'phone': lead.phone, 'email_from': lead.email_from }) new_opportunity = opp_obj.browse(cr, uid, new_opportunity_id) vals = { 'partner_id': this.partner_id and this.partner_id.id or False, } if not lead.opportunity_id: vals.update({'opportunity_id' : new_opportunity.id}) model_opportunity_id = self.pool.get('ir.model').search(cr, uid, [('model', '=', 'crm.opportunity')], context=context)[0] for model in ('crm.case.log', 'crm.case.history'): log_proxy = self.pool.get(model) log_ids = log_proxy.search(cr, uid, [('model_id.model', '=', 'crm.lead'),('res_id', '=', lead.id)], context=context) for log_id in log_ids: log_proxy.copy(cr, uid, log_id, {'model_id':model_opportunity_id}, context=context) lead_obj.write(cr, uid, [lead.id], vals) lead_obj.case_close(cr, uid, [lead.id]) for his_id in lead.history_line: history_ids = history_obj.copy(cr, uid, his_id.id, \ {'model_id': model_ids[0], \ 'res_id': new_opportunity_id}) value = { 'name': _('Opportunity'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_model': 'crm.opportunity', 'res_id': int(new_opportunity_id), 'view_id': False, 'views': [(id2, 'form'), (id3, 'tree'), (False, 'calendar'), (False, 'graph')], 'type': 'ir.actions.act_window', 'search_view_id': res['res_id'] } | def action_apply(self, cr, uid, ids, context=None): """ This converts lead to opportunity and opens Opportunity view @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Lead to Opportunity IDs @param context: A standard dictionary for contextual values | 9fb8e1684b4bf5c55aed095358b6e1510d6a195c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/9fb8e1684b4bf5c55aed095358b6e1510d6a195c/crm_lead_to_opportunity.py |
if result_def['type'] == 'choice': next_state = result_def['next_state'](self, cr, uid, data, context) return self.execute_cr(cr, uid, data, next_state, context) | result_def = state_def.get('result', {}) | 636a14b993b8d57899761a32680d1709193c67c8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/636a14b993b8d57899761a32680d1709193c67c8/__init__.py |
|
def update_list(self, cr, uid, context={}): res = [0, 0] # [update, add] | 5c038b59d998691c8abd7b9aee4052b9eaa83c74 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5c038b59d998691c8abd7b9aee4052b9eaa83c74/module.py |
||
self.write(cr, uid, id, { 'url': ''}) | self.write(cr, uid, id, {'url': ''}) | def update_list(self, cr, uid, context={}): res = [0, 0] # [update, add] | 5c038b59d998691c8abd7b9aee4052b9eaa83c74 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5c038b59d998691c8abd7b9aee4052b9eaa83c74/module.py |
def create(self, cr, uid, data, context={}): id = super(module, self).create(cr, uid, data, context) if data.get('name'): self.pool.get('ir.model.data').create(cr, uid, { 'name': 'module_meta_information', 'model': 'ir.module.module', 'res_id': id, 'module': data['name'], 'noupdate': True, }) return id | def check(self, cr, uid, ids, context=None): logger = logging.getLogger('init') for mod in self.browse(cr, uid, ids, context=context): if not mod.description: logger.warn('module %s: description is empty !', mod.name) | 5c038b59d998691c8abd7b9aee4052b9eaa83c74 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5c038b59d998691c8abd7b9aee4052b9eaa83c74/module.py |
|
if r[self._fields_id] in res.values(): | if r[self._fields_id] in res: | def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): if not context: context = {} if self._context: context = context.copy() context.update(self._context) if not values: values = {} | a79d0ee8e3d7284169e00e57351aaff17983b52b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a79d0ee8e3d7284169e00e57351aaff17983b52b/fields.py |
raise Exception('The module %s does not contain a description file: __openerp__.py or __terp__.py (deprecated)') | return | def load_information_from_description_file(module): """ :param module: The name of the module (sale, purchase, ...) """ for filename in ['__openerp__.py', '__terp__.py']: description_file = addons.get_module_resource(module, filename) if os.path.isfile(description_file): return eval(tools.file_open(description_file).read()) raise Exception('The module %s does not contain a description file: __openerp__.py or __terp__.py (deprecated)') | b528a80036cc13b539af76e5ff23f195a4086213 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b528a80036cc13b539af76e5ff23f195a4086213/__init__.py |
ids = [] cr.execute('select move_id,sum(debit) from account_move_line group by move_id') result = dict(cr.fetchall()) for item in args: if item[1] == '>=': res = [('id', 'in', [k for k,v in result.iteritems() if v >= item[2]])] | ids = set() for cond in args: amount = cond[2] if isinstance(cond[2],(list,tuple)): if cond[1] in ['in','not in']: amount = tuple(cond[2]) else: continue | def _search_amount(self, cr, uid, obj, name, args, context): ids = [] cr.execute('select move_id,sum(debit) from account_move_line group by move_id') result = dict(cr.fetchall()) | 2da2a984497bfab2280278a20a4e09333be86d98 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/2da2a984497bfab2280278a20a4e09333be86d98/account.py |
res = [('id', 'in', [k for k,v in result.iteritems() if v <= item[2]])] ids += res if not ids: return [('id', '>', '0')] return ids | if cond[1] in ['=like', 'like', 'not like', 'ilike', 'not ilike', 'in', 'not in', 'child_of']: continue cr.execute("select move_id from account_move_line group by move_id having sum(debit) %s %%s" % (cond[1]) ,(amount,)) res_ids = set(id[0] for id in cr.fetchall()) ids = ids and (ids & res_ids) or res_ids if ids: return [('id','in',tuple(ids))] else: return [('id', '=', '0')] | def _search_amount(self, cr, uid, obj, name, args, context): ids = [] cr.execute('select move_id,sum(debit) from account_move_line group by move_id') result = dict(cr.fetchall()) | 2da2a984497bfab2280278a20a4e09333be86d98 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/2da2a984497bfab2280278a20a4e09333be86d98/account.py |