rem: string, lengths 1–322k
add: string, lengths 0–2.05M
context: string, lengths 4–228k
meta: string, lengths 156–215
logging.getLogger('schema').info("Table '%s': column '%s': XXX" % (self._table, k))
self.__schema.debug("Table '%s': column '%s': XXX", self._table, k)
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logger.notifyChannel('orm', netsvc.LOG_ERROR, "Programming error, column %s->%s has multiple instances !" % (self._table, k))
netsvc.Logger().notifyChannel('orm', netsvc.LOG_ERROR, "Programming error, column %s->%s has multiple instances !" % (self._table, k))
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logging.getLogger('schema').info("Table '%s': added column '%s' with definition=%s" % (self._table, k, get_pg_type(f)[1]))
self.__schema.debug("Table '%s': added column '%s' with definition=%s", self._table, k, get_pg_type(f)[1])
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logger.notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
netsvc.Logger().notifyChannel('data', netsvc.LOG_DEBUG, "Table '%s': setting default value of new column %s" % (self._table, k))
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logging.getLogger('schema').info("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s" % ( self._table, k, ref, f.ondelete))
self.__schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", self._table, k, ref, f.ondelete)
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logging.getLogger('schema').info("Table '%s': column '%s': added a NOT NULL constraint" % (self._table, k))
self.__schema.debug("Table '%s': column '%s': added a NOT NULL constraint", self._table, k)
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
"ALTER TABLE %s ALTER COLUMN %s SET NOT NULL" % (k, self._table, self._table, k) logger.notifyChannel('orm', netsvc.LOG_WARNING, msg)
"ALTER TABLE %s ALTER COLUMN %s SET NOT NULL" self.__logger.warn(msg, k, self._table, self._table, k)
f_pg_def = res[0]
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logging.getLogger('schema').info(sql_action['msg_ok'])
self.__schema.debug(sql_action['msg_ok'])
cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
logger.notifyChannel('schema', netsvc.LOG_WARNING, sql_action['msg_err'])
self.__schema.warn(sql_action['msg_err'])
cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
a73711cb180c536a45540480eccaf5c3f0aa8ad3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a73711cb180c536a45540480eccaf5c3f0aa8ad3/orm.py
group_id = model_data_obj._get_id(cr, uid, 'survey', 'base.group_tool_user')
group_id = model_data_obj._get_id(cr, uid, 'base', 'group_tool_user')
def action_send(self, cr, uid, ids, context=None): record = self.read(cr, uid, ids, []) survey_ids = context.get('active_ids', []) record = record and record[0] partner_ids = record['partner_ids'] user_ref= self.pool.get('res.users') survey_ref= self.pool.get('survey')
24d023ee10685a752add11ea8056a569e6598e6b /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/24d023ee10685a752add11ea8056a569e6598e6b/survey_send_invitation.py
super(account_cash_statement, self).write(cr, uid, rs, res.get(rs))
super(account_cash_statement, self).write(cr, uid, [rs], res.get(rs))
def write(self, cr, uid, ids, vals, context=None): """ Update redord(s) comes in {ids}, with new value comes as {vals} return True on success, False otherwise
ed6d7457994f7e25d8196ac119783dae52264c60 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/ed6d7457994f7e25d8196ac119783dae52264c60/account_cash_statement.py
string="Return with Echange" type="object"/>
string="Return with Exchange" type="object"/>
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False,submenu=False):
f881f95bfe14d636766f9cad8e3940eaffcbcbe0 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/f881f95bfe14d636766f9cad8e3940eaffcbcbe0/pos_return.py
select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty
select stock.create_date as date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty
def init(self, cr): tools.drop_view_if_exists(cr, 'report_products_to_received_planned') cr.execute(""" create or replace view report_products_to_received_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'in' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
47502593c67c2f0e03b66420e8c942c1d29f8e1f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/47502593c67c2f0e03b66420e8c942c1d29f8e1f/stock.py
select stock.date, min(stock.id) as id, 0 as actual_qty, sum(stock.product_qty) as planned_qty
select stock.date as date , min(stock.id) as id, 0 as actual_qty, sum(stock.product_qty) as planned_qty
def init(self, cr): tools.drop_view_if_exists(cr, 'report_products_to_received_planned') cr.execute(""" create or replace view report_products_to_received_planned as ( select stock.create_date, min(stock.id) as id, sum(stock.product_qty) as qty, 0 as planned_qty from stock_picking picking inner join stock_move stock on picking.id = stock.picking_id and picking.type = 'in' where stock.create_date between (select cast(date_trunc('week', current_date) as date)) and (select cast(date_trunc('week', current_date) as date) + 7) group by stock.create_date
47502593c67c2f0e03b66420e8c942c1d29f8e1f /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/47502593c67c2f0e03b66420e8c942c1d29f8e1f/stock.py
cr.execute('select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active and nextcall>=now()')
cr.execute('select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active')
def _poolJobs(self, db_name, check=False): try: db, pool = pooler.get_db_and_pool(db_name) except: return False cr = db.cursor() try: if not pool._init: now = datetime.now() cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority') for job in cr.dictfetchall(): nextcall = datetime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S') numbercall = job['numbercall']
0eaa680c1feaf33058c0381f848951ba0e8fd136 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0eaa680c1feaf33058c0381f848951ba0e8fd136/ir_cron.py
value = {}
value = {'product_uom_id': ''}
def onchange_product_id(self, cr, uid, ids, product_id,product_uom_id, context={}):
129cfb45727e84c0b10daf933a6f730fdd5b5e56 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/129cfb45727e84c0b10daf933a6f730fdd5b5e56/purchase_requisition.py
if product_uom_id != prod.uom_id.id: value = {'product_uom_id': prod.uom_id.id}
def onchange_product_id(self, cr, uid, ids, product_id,product_uom_id, context={}):
129cfb45727e84c0b10daf933a6f730fdd5b5e56 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/129cfb45727e84c0b10daf933a6f730fdd5b5e56/purchase_requisition.py
def onchange_product_id(self, cr, uid, ids, product_id,product_uom_id, context={}):
129cfb45727e84c0b10daf933a6f730fdd5b5e56 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/129cfb45727e84c0b10daf933a6f730fdd5b5e56/purchase_requisition.py
def onchange_product_id(self, cr, uid, ids, product_id,product_uom_id, context={}):
129cfb45727e84c0b10daf933a6f730fdd5b5e56 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/129cfb45727e84c0b10daf933a6f730fdd5b5e56/purchase_requisition.py
for employee_id in ids: emp = obj_emp.read(cr, uid, [employee_id], ['name'])[0] stop, days_xml = False, [] user_repr = ''' <user> <name>%s</name> %%s </user> ''' % (toxml(emp['name'])) today, tomor = month, month + one_day while today.month == month.month: sql = ''' select action, att.name from hr_employee as emp inner join hr_attendance as att on emp.id = att.employee_id where att.name between %s and %s and emp.id = %s order by att.name ''' cr.execute(sql, (today.strftime('%Y-%m-%d %H:%M:%S'), tomor.strftime('%Y-%m-%d %H:%M:%S'), employee_id)) attendences = cr.dictfetchall() wh = 0 if attendences and attendences[0]['action'] == 'sign_out': attendences.insert(0, {'name': today.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_in'}) if attendences and attendences[-1]['action'] == 'sign_in': attendences.append({'name': tomor.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_out'}) for att in attendences: dt = DateTime.strptime(att['name'], '%Y-%m-%d %H:%M:%S') if att['action'] == 'sign_out': wh += (dt - ldt).hours ldt = dt wh = hour2str(wh) today_xml = '<day num="%s"><wh>%s</wh></day>' % ((today - month).days+1, wh) days_xml.append(today_xml) today, tomor = tomor, tomor + one_day user_xml.append(user_repr % '\n'.join(days_xml))
if emp_ids: for emp in obj_emp.read(cr, uid, emp_ids, ['name']): stop, days_xml = False, [] user_repr = ''' <user> <name>%s</name> %%s </user> ''' % (toxml(emp['name'])) today, tomor = month, month + one_day while today.month == month.month: sql = ''' select action, att.name from hr_employee as emp inner join hr_attendance as att on emp.id = att.employee_id where att.name between %s and %s and emp.id = %s order by att.name ''' cr.execute(sql, (today.strftime('%Y-%m-%d %H:%M:%S'), tomor.strftime('%Y-%m-%d %H:%M:%S'), emp['id'])) attendences = cr.dictfetchall() wh = 0 if attendences and attendences[0]['action'] == 'sign_out': attendences.insert(0, {'name': today.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_in'}) if attendences and attendences[-1]['action'] == 'sign_in': attendences.append({'name': tomor.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_out'}) for att in attendences: dt = DateTime.strptime(att['name'], '%Y-%m-%d %H:%M:%S') if att['action'] == 'sign_out': wh += (dt - ldt).hours ldt = dt wh = hour2str(wh) today_xml = '<day num="%s"><wh>%s</wh></day>' % ((today - month).days+1, wh) days_xml.append(today_xml) today, tomor = tomor, tomor + one_day user_xml.append(user_repr % '\n'.join(days_xml))
def create_xml(self, cr, uid, ids, datas, context=None): obj_emp = pooler.get_pool(cr.dbname).get('hr.employee') if context is None: context = {} month = DateTime.DateTime(datas['form']['year'], datas['form']['month'], 1) user_xml = ['<month>%s</month>' % month2name[month.month], '<year>%s</year>' % month.year] for employee_id in ids: emp = obj_emp.read(cr, uid, [employee_id], ['name'])[0] stop, days_xml = False, [] user_repr = ''' <user> <name>%s</name> %%s </user> ''' % (toxml(emp['name'])) today, tomor = month, month + one_day while today.month == month.month: #### Work hour calculation sql = ''' select action, att.name from hr_employee as emp inner join hr_attendance as att on emp.id = att.employee_id where att.name between %s and %s and emp.id = %s order by att.name ''' cr.execute(sql, (today.strftime('%Y-%m-%d %H:%M:%S'), tomor.strftime('%Y-%m-%d %H:%M:%S'), employee_id)) attendences = cr.dictfetchall() wh = 0 # Fake sign ins/outs at week ends, to take attendances across week ends into account if attendences and attendences[0]['action'] == 'sign_out': attendences.insert(0, {'name': today.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_in'}) if attendences and attendences[-1]['action'] == 'sign_in': attendences.append({'name': tomor.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_out'}) # sum up the attendances' durations for att in attendences: dt = DateTime.strptime(att['name'], '%Y-%m-%d %H:%M:%S') if att['action'] == 'sign_out': wh += (dt - ldt).hours ldt = dt
63bb1811a6ebe854e6aa006836748d9c85c23d12 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/63bb1811a6ebe854e6aa006836748d9c85c23d12/attendance_by_month.py
def create_xml(self, cr, uid, ids, datas, context=None): obj_emp = pooler.get_pool(cr.dbname).get('hr.employee') if context is None: context = {} month = DateTime.DateTime(datas['form']['year'], datas['form']['month'], 1) user_xml = ['<month>%s</month>' % month2name[month.month], '<year>%s</year>' % month.year] for employee_id in ids: emp = obj_emp.read(cr, uid, [employee_id], ['name'])[0] stop, days_xml = False, [] user_repr = ''' <user> <name>%s</name> %%s </user> ''' % (toxml(emp['name'])) today, tomor = month, month + one_day while today.month == month.month: #### Work hour calculation sql = ''' select action, att.name from hr_employee as emp inner join hr_attendance as att on emp.id = att.employee_id where att.name between %s and %s and emp.id = %s order by att.name ''' cr.execute(sql, (today.strftime('%Y-%m-%d %H:%M:%S'), tomor.strftime('%Y-%m-%d %H:%M:%S'), employee_id)) attendences = cr.dictfetchall() wh = 0 # Fake sign ins/outs at week ends, to take attendances across week ends into account if attendences and attendences[0]['action'] == 'sign_out': attendences.insert(0, {'name': today.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_in'}) if attendences and attendences[-1]['action'] == 'sign_in': attendences.append({'name': tomor.strftime('%Y-%m-%d %H:%M:%S'), 'action':'sign_out'}) # sum up the attendances' durations for att in attendences: dt = DateTime.strptime(att['name'], '%Y-%m-%d %H:%M:%S') if att['action'] == 'sign_out': wh += (dt - ldt).hours ldt = dt
63bb1811a6ebe854e6aa006836748d9c85c23d12 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/63bb1811a6ebe854e6aa006836748d9c85c23d12/attendance_by_month.py
res += char.lower()
res += char.upper()
def _format_iban(string): ''' This function removes all characters from given 'string' that isn't a alpha numeric and converts it to lower case. ''' res = "" for char in string: if char.isalnum(): res += char.lower() return res
562dcd893fb70e74d5bfd2f49f79f2220c9fce86 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/562dcd893fb70e74d5bfd2f49f79f2220c9fce86/base_iban.py
iban = _format_iban(bank_acc.iban)
iban = _format_iban(bank_acc.iban).lower()
def check_iban(self, cr, uid, ids): ''' Check the IBAN number ''' for bank_acc in self.browse(cr, uid, ids): if not bank_acc.iban: continue iban = _format_iban(bank_acc.iban) if iban[:2] in _iban_len and len(iban) != _iban_len[iban[:2]]: return False #the four first digits have to be shifted to the end iban = iban[4:] + iban[:4] #letters have to be transformed into numbers (a = 10, b = 11, ...) iban2 = "" for char in iban: if char.isalpha(): iban2 += str(ord(char)-87) else: iban2 += char #iban is correct if modulo 97 == 1 if not int(iban2) % 97 == 1: return False return True
562dcd893fb70e74d5bfd2f49f79f2220c9fce86 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/562dcd893fb70e74d5bfd2f49f79f2220c9fce86/base_iban.py
address_ids = address_pool.search(cr, uid, [('email', '=', from_email)])
address_ids = address_pool.search(cr, uid, [('email', 'like', from_email)])
def get_partner(self, cr, uid, from_email, context=None): """This function returns partner Id based on email passed @param self: The object pointer @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks @param from_email: email address based on that function will search for the correct """ address_pool = self.pool.get('res.partner.address') res = { 'partner_address_id': False, 'partner_id': False } from_email = self.to_email(from_email)[0] address_ids = address_pool.search(cr, uid, [('email', '=', from_email)]) if address_ids: address = address_pool.browse(cr, uid, address_ids[0]) res['partner_address_id'] = address_ids[0] res['partner_id'] = address.partner_id.id
58187afab8dd8b6a3c43f16d496a6f5d5d55f054 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/58187afab8dd8b6a3c43f16d496a6f5d5d55f054/mail_gateway.py
nmonth = str(int(next_date.strftime("%m"))% 12+2)
nmonth = str(int(next_date.strftime("%m"))% 12+1)
def compute(self, cr, uid, id, value, date_ref=False, context={}): if not date_ref: date_ref = datetime.now().strftime('%Y-%m-%d') pt = self.browse(cr, uid, id, context) amount = value result = [] for line in pt.line_ids: prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account') if line.value == 'fixed': amt = round(line.value_amount, prec) elif line.value == 'procent': amt = round(value * line.value_amount, prec) elif line.value == 'balance': amt = round(amount, prec) if amt: next_date = (datetime.strptime(date_ref, '%Y-%m-%d') + relativedelta(days=line.days)) if line.days2 < 0: nyear = next_date.strftime("%Y") nmonth = str(int(next_date.strftime("%m"))% 12+2) nday = "1"
7915f499d60c0a93208e3198ff8b5b57937b1e93 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/7915f499d60c0a93208e3198ff8b5b57937b1e93/account.py
def compute(self, cr, uid, id, value, date_ref=False, context={}): if not date_ref: date_ref = datetime.now().strftime('%Y-%m-%d') pt = self.browse(cr, uid, id, context) amount = value result = [] for line in pt.line_ids: prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account') if line.value == 'fixed': amt = round(line.value_amount, prec) elif line.value == 'procent': amt = round(value * line.value_amount, prec) elif line.value == 'balance': amt = round(amount, prec) if amt: next_date = (datetime.strptime(date_ref, '%Y-%m-%d') + relativedelta(days=line.days)) if line.days2 < 0: nyear = next_date.strftime("%Y") nmonth = str(int(next_date.strftime("%m"))% 12+2) nday = "1"
7915f499d60c0a93208e3198ff8b5b57937b1e93 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/7915f499d60c0a93208e3198ff8b5b57937b1e93/account.py
cr.execute("SELECT 1 FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) if cr.fetchone(): raise except_orm('ConcurrencyException', _('Records were modified in the meanwhile'))
cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) res = cr.fetchone() if res: raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def _check_concurrency(self, cr, ids, context): if not context: return if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access): return check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, now())::timestamp)" for sub_ids in cr.split_for_in_conditions(ids): ids_to_check = [] for id in sub_ids: id_ref = "%s,%s" % (self._name, id) update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None) if update_date: ids_to_check.extend([id, update_date]) if not ids_to_check: continue cr.execute("SELECT 1 FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check)) if cr.fetchone(): raise except_orm('ConcurrencyException', _('Records were modified in the meanwhile'))
ea26bded7aa4f8427bbf8f9a3760ec93dc84a586 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/ea26bded7aa4f8427bbf8f9a3760ec93dc84a586/orm.py
"Function: %s" % (pa.function and pa.function.name_get()[0][1] or ''),
"Function: %s" % (pa.function or ''),
def get_lead_details(self, cr, uid, lead_id, context=None): body = [] lead_proxy = self.pool.get('crm.lead') lead = lead_proxy.browse(cr, uid, lead_id, context=context) if not lead.type or lead.type == 'lead': field_names = [ 'partner_name', 'title', 'function', 'street', 'street2', 'zip', 'city', 'country_id', 'state_id', 'email_from', 'phone', 'fax', 'mobile' ]
2aa3e0c9d3a0755f377969a7b42b3e9543c88ecf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/2aa3e0c9d3a0755f377969a7b42b3e9543c88ecf/crm_forward_to_partner.py
if not os.path.isfile(terp_file):
if not terp_file or not os.path.isfile(terp_file):
def upgrade_graph(graph, cr, module_list, force=None): if force is None: force = [] packages = [] len_graph = len(graph) for module in module_list: mod_path = get_module_path(module) terp_file = get_module_resource(module, '__openerp__.py') if not os.path.isfile(terp_file): terp_file = get_module_resource(module, '__terp__.py') if not mod_path or not terp_file: global not_loaded not_loaded.append(module) logger.notifyChannel('init', netsvc.LOG_WARNING, 'module %s: not installable' % (module)) raise osv.osv.except_osv('Error!',"Module '%s' was not found" % (module,)) if os.path.isfile(terp_file) or zipfile.is_zipfile(mod_path+'.zip'): try: info = eval(tools.file_open(terp_file).read()) except: logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: eval file %s' % (module, terp_file)) raise if info.get('installable', True): packages.append((module, info.get('depends', []), info)) dependencies = dict([(p, deps) for p, deps, data in packages]) current, later = set([p for p, dep, data in packages]), set() while packages and current > later: package, deps, data = packages[0] # if all dependencies of 'package' are already in the graph, add 'package' in the graph if reduce(lambda x, y: x and y in graph, deps, True): if not package in current: packages.pop(0) continue later.clear() current.remove(package) graph.addNode(package, deps) node = Node(package, graph) node.data = data for kind in ('init', 'demo', 'update'): if package in tools.config[kind] or 'all' in tools.config[kind] or kind in force: setattr(node, kind, True) else: later.add(package) packages.append((package, deps, data)) packages.pop(0) graph.update_from_db(cr) for package in later: unmet_deps = filter(lambda p: p not in graph, dependencies[package]) logger.notifyChannel('init', netsvc.LOG_ERROR, 'module %s: Unmet dependencies: %s' % (package, ', '.join(unmet_deps))) result = len(graph) - len_graph if result != len(module_list): logger.notifyChannel('init', netsvc.LOG_WARNING, 'Not all modules have loaded.') return result
78556c7a19e25699e1206de9e3cc92533fe20f53 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/78556c7a19e25699e1206de9e3cc92533fe20f53/__init__.py
return all(this.company_id in this.company_ids for this in self.browse(cr, uid, ids, context))
return all(((this.company_id in this.company_ids) or not this.company_ids) for this in self.browse(cr, uid, ids, context))
def _check_company(self, cr, uid, ids, context=None): return all(this.company_id in this.company_ids for this in self.browse(cr, uid, ids, context))
de0f1d786380a43a96af7ea46a3eef1a45cabf65 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/de0f1d786380a43a96af7ea46a3eef1a45cabf65/res_user.py
clause += 'AND inv.state <> \'paid\''
clause += 'AND inv.state != \'cancel\' AND sale.state != \'cancel\' AND inv.state <> \'paid\' AND rel.order_id = sale.id ' sale_clause = ', sale_order AS sale '
def _invoiced_search(self, cursor, user, obj, name, args, context=None): if context is None: context = {} if not len(args): return [] clause = '' no_invoiced = False for arg in args: if arg[1] == '=': if arg[2]: clause += 'AND inv.state = \'paid\'' else: clause += 'AND inv.state <> \'paid\'' no_invoiced = True
b648470328eaee45d485c864a637421046f3c99c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b648470328eaee45d485c864a637421046f3c99c/sale.py
'FROM sale_order_invoice_rel AS rel, account_invoice AS inv ' \
'FROM sale_order_invoice_rel AS rel, account_invoice AS inv '+ sale_clause + \
def _invoiced_search(self, cursor, user, obj, name, args, context=None): if context is None: context = {} if not len(args): return [] clause = '' no_invoiced = False for arg in args: if arg[1] == '=': if arg[2]: clause += 'AND inv.state = \'paid\'' else: clause += 'AND inv.state <> \'paid\'' no_invoiced = True
b648470328eaee45d485c864a637421046f3c99c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b648470328eaee45d485c864a637421046f3c99c/sale.py
def _invoiced_search(self, cursor, user, obj, name, args, context=None): if context is None: context = {} if not len(args): return [] clause = '' no_invoiced = False for arg in args: if arg[1] == '=': if arg[2]: clause += 'AND inv.state = \'paid\'' else: clause += 'AND inv.state <> \'paid\'' no_invoiced = True
b648470328eaee45d485c864a637421046f3c99c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b648470328eaee45d485c864a637421046f3c99c/sale.py
'FROM sale_order_invoice_rel AS rel) ')
'FROM sale_order_invoice_rel AS rel) and sale.state != \'cancel\'')
def _invoiced_search(self, cursor, user, obj, name, args, context=None): if context is None: context = {} if not len(args): return [] clause = '' no_invoiced = False for arg in args: if arg[1] == '=': if arg[2]: clause += 'AND inv.state = \'paid\'' else: clause += 'AND inv.state <> \'paid\'' no_invoiced = True
b648470328eaee45d485c864a637421046f3c99c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/b648470328eaee45d485c864a637421046f3c99c/sale.py
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None): """ @param product_id: Id of product @param product_qty: Quantity of product @return: List of Values or False
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None, lock=False): """ Attempt to find a quantity ``product_qty`` (in the product's default uom or the uom passed in ``context``) of product ``product_id`` in locations with id ``ids`` and their child locations. If ``lock`` is True, the stock.move lines of product with id ``product_id`` in the searched location will be write-locked using Postgres's "FOR UPDATE NOWAIT" option until the transaction is committed or rolled back, to prevent reservin twice the same products. If ``lock`` is True and the lock cannot be obtained (because another transaction has locked some of the same stock.move lines), a log line will be output and False will be returned, as if there was not enough stock. :param product_id: Id of product to reserve :param product_qty: Quantity of product to reserve (in the product's default uom or the uom passed in ``context``) :param lock: if True, the stock.move lines of product with id ``product_id`` in all locations (and children locations) with ``ids`` will be write-locked using postgres's "FOR UPDATE NOWAIT" option until the transaction is committed or rolled back. This is to prevent reserving twice the same products. :param context: optional context dictionary: it a 'uom' key is present it will be used instead of the default product uom to compute the ``product_qty`` and in the return value. :return: List of tuples in the form (qty, location_id) with the (partial) quantities that can be taken in each location to reach the requested product_qty (``qty`` is expressed in the default uom of the product), of False if enough products could not be found, or the lock could not be obtained (and ``lock`` was True).
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None): """ @param product_id: Id of product @param product_qty: Quantity of product @return: List of Values or False """ result = [] amount = 0.0 if context is None: context = {} for id in self.search(cr, uid, [('location_id', 'child_of', ids)]): cr.execute("select product_uom,sum(product_qty) as product_qty from stock_move where location_dest_id=%s and location_id<>%s and product_id=%s and state='done' group by product_uom", (id, id, product_id)) results = cr.dictfetchall() cr.execute("select product_uom,-sum(product_qty) as product_qty from stock_move where location_id=%s and location_dest_id<>%s and product_id=%s and state in ('done', 'assigned') group by product_uom", (id, id, product_id)) results += cr.dictfetchall()
fed8a6838f1bfe251e8f3b59483688b993f333a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fed8a6838f1bfe251e8f3b59483688b993f333a8/stock.py
cr.execute("select product_uom,sum(product_qty) as product_qty from stock_move where location_dest_id=%s and location_id<>%s and product_id=%s and state='done' group by product_uom", (id, id, product_id))
if lock: try: cr.execute("SAVEPOINT stock_location_product_reserve") cr.execute("""SELECT id FROM stock_move WHERE product_id=%s AND ( (location_dest_id=%s AND location_id<>%s AND state='done') OR (location_id=%s AND location_dest_id<>%s AND state in ('done', 'assigned')) ) FOR UPDATE of stock_move NOWAIT""", (product_id, id, id, id, id), log_exceptions=False) except Exception: cr.execute("ROLLBACK TO stock_location_product_reserve") logger = logging.getLogger('stock.location') logger.warn("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id) logger.debug("Trace of the failed product reservation attempt: ", exc_info=True) return False cr.execute("""SELECT product_uom, sum(product_qty) AS product_qty FROM stock_move WHERE location_dest_id=%s AND location_id<>%s AND product_id=%s AND state='done' GROUP BY product_uom """, (id, id, product_id))
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None): """ @param product_id: Id of product @param product_qty: Quantity of product @return: List of Values or False """ result = [] amount = 0.0 if context is None: context = {} for id in self.search(cr, uid, [('location_id', 'child_of', ids)]): cr.execute("select product_uom,sum(product_qty) as product_qty from stock_move where location_dest_id=%s and location_id<>%s and product_id=%s and state='done' group by product_uom", (id, id, product_id)) results = cr.dictfetchall() cr.execute("select product_uom,-sum(product_qty) as product_qty from stock_move where location_id=%s and location_dest_id<>%s and product_id=%s and state in ('done', 'assigned') group by product_uom", (id, id, product_id)) results += cr.dictfetchall()
fed8a6838f1bfe251e8f3b59483688b993f333a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fed8a6838f1bfe251e8f3b59483688b993f333a8/stock.py
cr.execute("select product_uom,-sum(product_qty) as product_qty from stock_move where location_id=%s and location_dest_id<>%s and product_id=%s and state in ('done', 'assigned') group by product_uom", (id, id, product_id))
cr.execute("""SELECT product_uom,-sum(product_qty) AS product_qty FROM stock_move WHERE location_id=%s AND location_dest_id<>%s AND product_id=%s AND state in ('done', 'assigned') GROUP BY product_uom """, (id, id, product_id))
def _product_reserve(self, cr, uid, ids, product_id, product_qty, context=None): """ @param product_id: Id of product @param product_qty: Quantity of product @return: List of Values or False """ result = [] amount = 0.0 if context is None: context = {} for id in self.search(cr, uid, [('location_id', 'child_of', ids)]): cr.execute("select product_uom,sum(product_qty) as product_qty from stock_move where location_dest_id=%s and location_id<>%s and product_id=%s and state='done' group by product_uom", (id, id, product_id)) results = cr.dictfetchall() cr.execute("select product_uom,-sum(product_qty) as product_qty from stock_move where location_id=%s and location_dest_id<>%s and product_id=%s and state in ('done', 'assigned') group by product_uom", (id, id, product_id)) results += cr.dictfetchall()
fed8a6838f1bfe251e8f3b59483688b993f333a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fed8a6838f1bfe251e8f3b59483688b993f333a8/stock.py
res = self.pool.get('stock.location')._product_reserve(cr, uid, [move.location_id.id], move.product_id.id, move.product_qty, {'uom': move.product_uom.id})
res = self.pool.get('stock.location')._product_reserve(cr, uid, [move.location_id.id], move.product_id.id, move.product_qty, {'uom': move.product_uom.id}, lock=True)
def check_assign(self, cr, uid, ids, context=None): """ Checks the product type and accordingly writes the state. @return: No. of moves done """ done = [] count = 0 pickings = {} if context is None: context = {} for move in self.browse(cr, uid, ids, context=context): if move.product_id.type == 'consu': if move.state in ('confirmed', 'waiting'): done.append(move.id) pickings[move.picking_id.id] = 1 continue if move.state in ('confirmed', 'waiting'): res = self.pool.get('stock.location')._product_reserve(cr, uid, [move.location_id.id], move.product_id.id, move.product_qty, {'uom': move.product_uom.id}) if res: #_product_available_test depends on the next status for correct functioning #the test does not work correctly if the same product occurs multiple times #in the same order. This is e.g. the case when using the button 'split in two' of #the stock outgoing form self.write(cr, uid, [move.id], {'state':'assigned'}) done.append(move.id) pickings[move.picking_id.id] = 1 r = res.pop(0) cr.execute('update stock_move set location_id=%s, product_qty=%s where id=%s', (r[1], r[0], move.id))
fed8a6838f1bfe251e8f3b59483688b993f333a8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fed8a6838f1bfe251e8f3b59483688b993f333a8/stock.py
values['description']=this.notes values['partner_id']=this.partner_id
values['description']=this.notes or '' values['partner_id']=this.partner_id.id
def action_apply(self, cr, uid, ids, context=None): this = self.browse(cr, uid, ids)[0] record_id = context and context.get('record_id', False) or False values={} values['name']=this.name values['user_id']=this.user_id and this.user_id.id values['categ_id']=this.category_id and this.category_id.id values['section_id']=this.section_id and this.section_id.id or False, values['description']=this.notes values['partner_id']=this.partner_id values['partner_address_id']=this.address_id.id phonecall_proxy = self.pool.get('crm.phonecall') phonecall_id = phonecall_proxy.create(cr, uid, values, context=context) value = { 'name': _('Phone Call'), 'view_type': 'form', 'view_mode': 'form', 'res_model': 'crm.phonecall', 'view_id': False, 'type': 'ir.actions.act_window', 'res_id': phonecall_id } return value
93a6ebff04c022be656ad3a5c03503696e6807f1 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/93a6ebff04c022be656ad3a5c03503696e6807f1/crm_phonecall2phonecall.py
class audittrail_objects_proxy(osv_pool):
class audittrail_objects_proxy(object_proxy):
def _name_get_resname(self, cr, uid, ids, *args): data = {} for resname in self.browse(cr, uid, ids,[]): model_object = resname.object_id res_id = resname.res_id if model_object and res_id: model_pool = self.pool.get(model_object.model) res = model_pool.read(cr, uid, res_id, ['name']) data[resname.id] = res['name'] else: data[resname.id] = False return data
177e2675c98e4b156e26db8c1a0cc6659e79c054 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/177e2675c98e4b156e26db8c1a0cc6659e79c054/audittrail.py
'name': pool.get('ir.sequence').get(cr, uid, 'stock.picking'),
'name': pool.get('ir.sequence').get(cr, uid, 'stock.picking.in'),
def _do_split(self, cr, uid, data, context): move_obj = pooler.get_pool(cr.dbname).get('stock.move') pick_obj = pooler.get_pool(cr.dbname).get('stock.picking') delivery_obj = pooler.get_pool(cr.dbname).get('stock.delivery') pick = pick_obj.browse(cr, uid, [data['id']])[0] new_picking = None new_moves = [] complete, too_many, too_few = [], [], [] pool = pooler.get_pool(cr.dbname) for move in move_obj.browse(cr, uid, data['form'].get('moves',[])): if move.product_qty == data['form']['move%s' % move.id]: complete.append(move) elif move.product_qty > data['form']['move%s' % move.id]: too_few.append(move) else: too_many.append(move) # Average price computation if (pick.type == 'in') and (move.product_id.cost_method == 'average'): product_obj = pool.get('product.product') currency_obj = pool.get('res.currency') users_obj = pool.get('res.users') uom_obj = pool.get('product.uom') product = product_obj.browse(cr, uid, [move.product_id.id])[0] user = users_obj.browse(cr, uid, [uid])[0] qty = data['form']['move%s' % move.id] uom = data['form']['uom%s' % move.id] price = data['form']['price%s' % move.id] currency = data['form']['currency%s' % move.id] qty = uom_obj._compute_qty(cr, uid, uom, qty, product.uom_id.id) pricetype=pool.get('product.price.type').browse(cr,uid,user.company_id.property_valuation_price_type.id) if (qty > 0): new_price = currency_obj.compute(cr, uid, currency, user.company_id.currency_id.id, price) new_price = uom_obj._compute_price(cr, uid, uom, new_price, product.uom_id.id) if product.qty_available<=0: new_std_price = new_price else: # Get the standard price amount_unit=product.price_get(pricetype.field, context)[product.id] new_std_price = ((amount_unit * product.qty_available)\ + (new_price * qty))/(product.qty_available + qty) # Write the field according to price type field product_obj.write(cr, uid, [product.id], {pricetype.field: new_std_price}) move_obj.write(cr, uid, [move.id], {'price_unit': new_price}) for move in too_few: if not new_picking: new_picking = pick_obj.copy(cr, uid, pick.id, { 'name': pool.get('ir.sequence').get(cr, uid, 'stock.picking'), 'move_lines' : [], 'state':'draft', }) if data['form']['move%s' % move.id] != 0: new_obj = move_obj.copy(cr, uid, move.id, { 'product_qty' : data['form']['move%s' % move.id], 'product_uos_qty':data['form']['move%s' % move.id], 'picking_id' : new_picking, 'state': 'assigned', 'move_dest_id': False, 'partner_id': data['form']['partner_id%s' % pick.id], 'address_id': data['form']['address_id%s' % pick.id], 'price_unit': move.price_unit, }) delivery_id = delivery_obj.search(cr,uid, [('name','=',pick.name)]) if not delivery_id : delivery_id = delivery_obj.create(cr, uid, { 'name': pick.name, 'partner_id': data['form']['partner_id%s' % pick.id], 'date': move.date, 'product_delivered':[(6,0, [new_obj])], 'picking_id':move.picking_id.id }, context=context) if not isinstance(delivery_id, (int, long)): delivery_id=delivery_id[0] delivery_obj.write(cr, uid, [delivery_id], {'product_delivered': [(4, new_obj)]}) move_obj.write(cr, uid, [move.id], { 'product_qty' : move.product_qty - data['form']['move%s' % move.id], 'product_uos_qty':move.product_qty - data['form']['move%s' % move.id], # 'delivered_id':delivery_id }) if new_picking: move_obj.write(cr, uid, [c.id for c in complete], {'picking_id': new_picking}) for move in too_many: move_obj.write(cr, uid, [move.id], { 'product_qty' : data['form']['move%s' % move.id], 'product_uos_qty': data['form']['move%s' % move.id], 'picking_id': new_picking, }) else: for move in too_many: 
move_obj.write(cr, uid, [move.id], { 'product_qty': data['form']['move%s' % move.id], 'product_uos_qty': data['form']['move%s' % move.id] }) # At first we confirm the new picking (if necessary) wf_service = netsvc.LocalService("workflow") if new_picking: wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_confirm', cr) # Then we finish the good picking if new_picking: pick_obj.write(cr, uid, [pick.id], {'backorder_id': new_picking}) pick_obj.action_move(cr, uid, [new_picking]) wf_service.trg_validate(uid, 'stock.picking', new_picking, 'button_done', cr) wf_service.trg_write(uid, 'stock.picking', pick.id, cr) else: pick_obj.action_move(cr, uid, [pick.id]) wf_service.trg_validate(uid, 'stock.picking', pick.id, 'button_done', cr) bo_name = '' if new_picking: bo_name = pick_obj.read(cr, uid, [new_picking], ['name'])[0]['name'] return {'new_picking':new_picking or False, 'back_order':bo_name}
43c66f1ce51b1b29dd14b1566c97c2da066612b3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/43c66f1ce51b1b29dd14b1566c97c2da066612b3/wizard_partial_picking.py
pur_tax_parent = mod_obj._get_id(cr, uid, 'account', 'vat_code_base_purchases')
pur_tax_parent = mod_obj._get_id(cr, uid, 'account', 'tax_code_base_purchases')
def execute(self, cr, uid, ids, context=None): if context is None: context = {} super(account_installer, self).execute(cr, uid, ids, context=context) record = self.browse(cr, uid, ids, context=context)[0] company_id = self.pool.get('res.users').browse(cr, uid, [uid], context)[0].company_id for res in self.read(cr, uid, ids): if record.charts == 'configurable': mod_obj = self.pool.get('ir.model.data') fp = tools.file_open(opj('account','configurable_account_chart.xml')) tools.convert_xml_import(cr, 'account', fp, {}, 'init',True, None) fp.close() self.generate_configurable_chart(cr, uid, ids, context=context) obj_tax = self.pool.get('account.tax') obj_product = self.pool.get('product.product') ir_values = self.pool.get('ir.values') s_tax = (res.get('sale_tax',0.0))/100 p_tax = (res.get('purchase_tax',0.0))/100 tax_val = {} default_tax = []
3ac06a178ac1b3e9a98090f621c16012c5ba4454 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3ac06a178ac1b3e9a98090f621c16012c5ba4454/installer.py
sal_tax_parent = mod_obj._get_id(cr, uid, 'account', 'vat_code_base_sales')
sal_tax_parent = mod_obj._get_id(cr, uid, 'account', 'tax_code_base_sales')
def execute(self, cr, uid, ids, context=None): if context is None: context = {} super(account_installer, self).execute(cr, uid, ids, context=context) record = self.browse(cr, uid, ids, context=context)[0] company_id = self.pool.get('res.users').browse(cr, uid, [uid], context)[0].company_id for res in self.read(cr, uid, ids): if record.charts == 'configurable': mod_obj = self.pool.get('ir.model.data') fp = tools.file_open(opj('account','configurable_account_chart.xml')) tools.convert_xml_import(cr, 'account', fp, {}, 'init',True, None) fp.close() self.generate_configurable_chart(cr, uid, ids, context=context) obj_tax = self.pool.get('account.tax') obj_product = self.pool.get('product.product') ir_values = self.pool.get('ir.values') s_tax = (res.get('sale_tax',0.0))/100 p_tax = (res.get('purchase_tax',0.0))/100 tax_val = {} default_tax = []
3ac06a178ac1b3e9a98090f621c16012c5ba4454 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3ac06a178ac1b3e9a98090f621c16012c5ba4454/installer.py
'name': 'VAT%s%%'%(s_tax*100), 'code': 'VAT%s%%'%(s_tax*100),
'name': 'TAX%s%%'%(s_tax*100), 'code': 'TAX%s%%'%(s_tax*100),
def execute(self, cr, uid, ids, context=None): if context is None: context = {} super(account_installer, self).execute(cr, uid, ids, context=context) record = self.browse(cr, uid, ids, context=context)[0] company_id = self.pool.get('res.users').browse(cr, uid, [uid], context)[0].company_id for res in self.read(cr, uid, ids): if record.charts == 'configurable': mod_obj = self.pool.get('ir.model.data') fp = tools.file_open(opj('account','configurable_account_chart.xml')) tools.convert_xml_import(cr, 'account', fp, {}, 'init',True, None) fp.close() self.generate_configurable_chart(cr, uid, ids, context=context) obj_tax = self.pool.get('account.tax') obj_product = self.pool.get('product.product') ir_values = self.pool.get('ir.values') s_tax = (res.get('sale_tax',0.0))/100 p_tax = (res.get('purchase_tax',0.0))/100 tax_val = {} default_tax = []
3ac06a178ac1b3e9a98090f621c16012c5ba4454 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3ac06a178ac1b3e9a98090f621c16012c5ba4454/installer.py
{'name':'VAT%s%%'%(s_tax*100), 'description':'VAT%s%%'%(s_tax*100),
{'name':'TAX%s%%'%(s_tax*100), 'description':'TAX%s%%'%(s_tax*100),
def execute(self, cr, uid, ids, context=None): if context is None: context = {} super(account_installer, self).execute(cr, uid, ids, context=context) record = self.browse(cr, uid, ids, context=context)[0] company_id = self.pool.get('res.users').browse(cr, uid, [uid], context)[0].company_id for res in self.read(cr, uid, ids): if record.charts == 'configurable': mod_obj = self.pool.get('ir.model.data') fp = tools.file_open(opj('account','configurable_account_chart.xml')) tools.convert_xml_import(cr, 'account', fp, {}, 'init',True, None) fp.close() self.generate_configurable_chart(cr, uid, ids, context=context) obj_tax = self.pool.get('account.tax') obj_product = self.pool.get('product.product') ir_values = self.pool.get('ir.values') s_tax = (res.get('sale_tax',0.0))/100 p_tax = (res.get('purchase_tax',0.0))/100 tax_val = {} default_tax = []
3ac06a178ac1b3e9a98090f621c16012c5ba4454 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3ac06a178ac1b3e9a98090f621c16012c5ba4454/installer.py
'name': 'VAT%s%%'%(p_tax*100), 'code': 'VAT%s%%'%(p_tax*100),
'name': 'TAX%s%%'%(p_tax*100), 'code': 'TAX%s%%'%(p_tax*100),
def execute(self, cr, uid, ids, context=None): if context is None: context = {} super(account_installer, self).execute(cr, uid, ids, context=context) record = self.browse(cr, uid, ids, context=context)[0] company_id = self.pool.get('res.users').browse(cr, uid, [uid], context)[0].company_id for res in self.read(cr, uid, ids): if record.charts == 'configurable': mod_obj = self.pool.get('ir.model.data') fp = tools.file_open(opj('account','configurable_account_chart.xml')) tools.convert_xml_import(cr, 'account', fp, {}, 'init',True, None) fp.close() self.generate_configurable_chart(cr, uid, ids, context=context) obj_tax = self.pool.get('account.tax') obj_product = self.pool.get('product.product') ir_values = self.pool.get('ir.values') s_tax = (res.get('sale_tax',0.0))/100 p_tax = (res.get('purchase_tax',0.0))/100 tax_val = {} default_tax = []
3ac06a178ac1b3e9a98090f621c16012c5ba4454 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3ac06a178ac1b3e9a98090f621c16012c5ba4454/installer.py
{'name':'VAT%s%%'%(p_tax*100), 'description':'VAT%s%%'%(p_tax*100),
{'name':'TAX%s%%'%(p_tax*100), 'description':'TAX%s%%'%(p_tax*100),
def execute(self, cr, uid, ids, context=None): if context is None: context = {} super(account_installer, self).execute(cr, uid, ids, context=context) record = self.browse(cr, uid, ids, context=context)[0] company_id = self.pool.get('res.users').browse(cr, uid, [uid], context)[0].company_id for res in self.read(cr, uid, ids): if record.charts == 'configurable': mod_obj = self.pool.get('ir.model.data') fp = tools.file_open(opj('account','configurable_account_chart.xml')) tools.convert_xml_import(cr, 'account', fp, {}, 'init',True, None) fp.close() self.generate_configurable_chart(cr, uid, ids, context=context) obj_tax = self.pool.get('account.tax') obj_product = self.pool.get('product.product') ir_values = self.pool.get('ir.values') s_tax = (res.get('sale_tax',0.0))/100 p_tax = (res.get('purchase_tax',0.0))/100 tax_val = {} default_tax = []
3ac06a178ac1b3e9a98090f621c16012c5ba4454 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3ac06a178ac1b3e9a98090f621c16012c5ba4454/installer.py
if move.prodlot_id and (move.prodlot_id.product_id.id != move.product_id.id):
if move.prodlot_id and move.state == 'done' and (move.prodlot_id.product_id.id != move.product_id.id):
def _check_product_lot(self, cr, uid, ids): for move in self.browse(cr, uid, ids): if move.prodlot_id and (move.prodlot_id.product_id.id != move.product_id.id): return False return True
a5bdbbb73246853cdcfe41f0a4483ce4a1a01899 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/a5bdbbb73246853cdcfe41f0a4483ce4a1a01899/stock.py
def _module_nbr(self,cr,uid, ids, prop, unknow_none,context):
def _module_nbr(self,cr,uid, ids, prop, unknow_none, context):
def _module_nbr(self,cr,uid, ids, prop, unknow_none,context): cr.execute('SELECT category_id, COUNT(*) \ FROM ir_module_module \ WHERE category_id IN %(ids)s \ OR category_id IN (SELECT id \ FROM ir_module_category \ WHERE parent_id IN %(ids)s) \ GROUP BY category_id', {'ids': tuple(ids)} ) result = dict(cr.fetchall()) for id in ids: cr.execute('select id from ir_module_category where parent_id=%s', (id,)) result[id] = sum([result.get(c, 0) for (c,) in cr.fetchall()], result.get(id, 0)) return result
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def _get_latest_version(self, cr, uid, ids, field_name=None, arg=None, context={}):
def _get_latest_version(self, cr, uid, ids, field_name=None, arg=None, context=None):
def _get_latest_version(self, cr, uid, ids, field_name=None, arg=None, context={}): res = dict.fromkeys(ids, '') for m in self.browse(cr, uid, ids): res[m.id] = self.get_module_info(m.name).get('version', '') return res
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context={}):
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context=None):
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context={}): res = {} model_data_obj = self.pool.get('ir.model.data') view_obj = self.pool.get('ir.ui.view') report_obj = self.pool.get('ir.actions.report.xml') menu_obj = self.pool.get('ir.ui.menu') mlist = self.browse(cr, uid, ids, context=context) mnames = {} for m in mlist: mnames[m.name] = m.id res[m.id] = { 'menus_by_module':[], 'reports_by_module':[], 'views_by_module': [] } view_id = model_data_obj.search(cr,uid,[('module','in', mnames.keys()), ('model','in',('ir.ui.view','ir.actions.report.xml','ir.ui.menu'))]) for data_id in model_data_obj.browse(cr,uid,view_id,context): # We use try except, because views or menus may not exist try: key = data_id['model'] if key=='ir.ui.view': v = view_obj.browse(cr,uid,data_id.res_id) aa = v.inherit_id and '* INHERIT ' or '' res[mnames[data_id.module]]['views_by_module'].append(aa + v.name + '('+v.type+')') elif key=='ir.actions.report.xml': res[mnames[data_id.module]]['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name) elif key=='ir.ui.menu': res[mnames[data_id.module]]['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name) except KeyError, e: pass for key, value in res.iteritems() : for k, v in res[key].iteritems() : res[key][k] = "\n".join(sorted(v)) return res
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
key = data_id['model']
key = data_id.model
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context={}): res = {} model_data_obj = self.pool.get('ir.model.data') view_obj = self.pool.get('ir.ui.view') report_obj = self.pool.get('ir.actions.report.xml') menu_obj = self.pool.get('ir.ui.menu') mlist = self.browse(cr, uid, ids, context=context) mnames = {} for m in mlist: mnames[m.name] = m.id res[m.id] = { 'menus_by_module':[], 'reports_by_module':[], 'views_by_module': [] } view_id = model_data_obj.search(cr,uid,[('module','in', mnames.keys()), ('model','in',('ir.ui.view','ir.actions.report.xml','ir.ui.menu'))]) for data_id in model_data_obj.browse(cr,uid,view_id,context): # We use try except, because views or menus may not exist try: key = data_id['model'] if key=='ir.ui.view': v = view_obj.browse(cr,uid,data_id.res_id) aa = v.inherit_id and '* INHERIT ' or '' res[mnames[data_id.module]]['views_by_module'].append(aa + v.name + '('+v.type+')') elif key=='ir.actions.report.xml': res[mnames[data_id.module]]['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name) elif key=='ir.ui.menu': res[mnames[data_id.module]]['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name) except KeyError, e: pass for key, value in res.iteritems() : for k, v in res[key].iteritems() : res[key][k] = "\n".join(sorted(v)) return res
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
for key, value in res.iteritems() :
except Exception, e: self.__logger.warning('Unknown error while browsing %s[%s]', data_id.model, data_id.res_id, exc_info=True) pass for key, value in res.iteritems():
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context={}): res = {} model_data_obj = self.pool.get('ir.model.data') view_obj = self.pool.get('ir.ui.view') report_obj = self.pool.get('ir.actions.report.xml') menu_obj = self.pool.get('ir.ui.menu') mlist = self.browse(cr, uid, ids, context=context) mnames = {} for m in mlist: mnames[m.name] = m.id res[m.id] = { 'menus_by_module':[], 'reports_by_module':[], 'views_by_module': [] } view_id = model_data_obj.search(cr,uid,[('module','in', mnames.keys()), ('model','in',('ir.ui.view','ir.actions.report.xml','ir.ui.menu'))]) for data_id in model_data_obj.browse(cr,uid,view_id,context): # We use try except, because views or menus may not exist try: key = data_id['model'] if key=='ir.ui.view': v = view_obj.browse(cr,uid,data_id.res_id) aa = v.inherit_id and '* INHERIT ' or '' res[mnames[data_id.module]]['views_by_module'].append(aa + v.name + '('+v.type+')') elif key=='ir.actions.report.xml': res[mnames[data_id.module]]['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name) elif key=='ir.ui.menu': res[mnames[data_id.module]]['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name) except KeyError, e: pass for key, value in res.iteritems() : for k, v in res[key].iteritems() : res[key][k] = "\n".join(sorted(v)) return res
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
'state': lambda *a: 'uninstalled', 'demo': lambda *a: False, 'license': lambda *a: 'AGPL-3',
'state': 'uninstalled', 'demo': False, 'license': 'AGPL-3',
def _get_views(self, cr, uid, ids, field_name=None, arg=None, context={}): res = {} model_data_obj = self.pool.get('ir.model.data') view_obj = self.pool.get('ir.ui.view') report_obj = self.pool.get('ir.actions.report.xml') menu_obj = self.pool.get('ir.ui.menu') mlist = self.browse(cr, uid, ids, context=context) mnames = {} for m in mlist: mnames[m.name] = m.id res[m.id] = { 'menus_by_module':[], 'reports_by_module':[], 'views_by_module': [] } view_id = model_data_obj.search(cr,uid,[('module','in', mnames.keys()), ('model','in',('ir.ui.view','ir.actions.report.xml','ir.ui.menu'))]) for data_id in model_data_obj.browse(cr,uid,view_id,context): # We use try except, because views or menus may not exist try: key = data_id['model'] if key=='ir.ui.view': v = view_obj.browse(cr,uid,data_id.res_id) aa = v.inherit_id and '* INHERIT ' or '' res[mnames[data_id.module]]['views_by_module'].append(aa + v.name + '('+v.type+')') elif key=='ir.actions.report.xml': res[mnames[data_id.module]]['reports_by_module'].append(report_obj.browse(cr,uid,data_id.res_id).name) elif key=='ir.ui.menu': res[mnames[data_id.module]]['menus_by_module'].append(menu_obj.browse(cr,uid,data_id.res_id).complete_name) except KeyError, e: pass for key, value in res.iteritems() : for k, v in res[key].iteritems() : res[key][k] = "\n".join(sorted(v)) return res
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def button_install(self, cr, uid, ids, context={}):
def button_install(self, cr, uid, ids, context=None):
def button_install(self, cr, uid, ids, context={}): return self.state_update(cr, uid, ids, 'to install', ['uninstalled'], context)
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def button_install_cancel(self, cr, uid, ids, context={}):
def button_install_cancel(self, cr, uid, ids, context=None):
def button_install_cancel(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state': 'uninstalled', 'demo':False}) return True
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def button_uninstall(self, cr, uid, ids, context={}):
def button_uninstall(self, cr, uid, ids, context=None):
def button_uninstall(self, cr, uid, ids, context={}): for module in self.browse(cr, uid, ids): cr.execute('''select m.state,m.name from ir_module_module_dependency d join ir_module_module m on (d.module_id=m.id) where d.name=%s and m.state not in ('uninstalled','uninstallable','to remove')''', (module.name,)) res = cr.fetchall() if res: raise orm.except_orm(_('Error'), _('Some installed modules depend on the module you plan to Uninstall :\n %s') % '\n'.join(map(lambda x: '\t%s: %s' % (x[0], x[1]), res))) self.write(cr, uid, ids, {'state': 'to remove'}) return True
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def button_uninstall_cancel(self, cr, uid, ids, context={}):
def button_uninstall_cancel(self, cr, uid, ids, context=None):
def button_uninstall_cancel(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state': 'installed'}) return True
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def button_upgrade_cancel(self, cr, uid, ids, context={}):
def button_upgrade_cancel(self, cr, uid, ids, context=None):
def button_upgrade_cancel(self, cr, uid, ids, context={}): self.write(cr, uid, ids, {'state': 'installed'}) return True
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def _update_dependencies(self, cr, uid, id, depends=[]):
def _update_dependencies(self, cr, uid, id, depends=None): if depends is None: depends = []
def _update_dependencies(self, cr, uid, id, depends=[]): for d in depends: cr.execute('INSERT INTO ir_module_module_dependency (module_id, name) values (%s, %s)', (id, d))
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def _state(self, cr, uid, ids, name, args, context={}):
def _state(self, cr, uid, ids, name, args, context=None):
def _state(self, cr, uid, ids, name, args, context={}): result = {} mod_obj = self.pool.get('ir.module.module') for md in self.browse(cr, uid, ids): ids = mod_obj.search(cr, uid, [('name', '=', md.name)]) if ids: result[md.id] = mod_obj.read(cr, uid, [ids[0]], ['state'])[0]['state'] else: result[md.id] = 'unknown' return result
0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/0c1061e4deb5a9c9963ede2ea8bc2b185ff66af8/module.py
def SearchFontPath(font_file):
__foundFonts = [] def FindCustomFonts(): """Fill the __foundFonts list with those filenames, whose fonts can be found in the reportlab ttf font path. This process needs only be done once per loading of this module, it is cached. But, if the system admin adds some font in the meanwhile, the server must be restarted eventually. """ dirpath = [] global __foundFonts
def SearchFontPath(font_file): for dirname in rl_config.TTFSearchPath: for root, dirs, files in os.walk(os.path.abspath(dirname)): for file_name in files: filename = os.path.join(root, file_name) extension = os.path.splitext(filename)[1] if extension.lower() in ['.ttf']: if file_name==font_file: return True return False
d1347d0b0d62c13375232d1defd4b2cff6d87acd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d1347d0b0d62c13375232d1defd4b2cff6d87acd/customfonts.py
for root, dirs, files in os.walk(os.path.abspath(dirname)): for file_name in files: filename = os.path.join(root, file_name) extension = os.path.splitext(filename)[1] if extension.lower() in ['.ttf']: if file_name==font_file: return True return False
abp = os.path.abspath(dirname) if os.path.isdir(abp): dirpath.append(abp) for k, (name, font, fname, mode) in enumerate(CustomTTFonts): if fname in __foundFonts: continue for d in dirpath: if os.path.exists(os.path.join(d, fname)): print "found font %s in %s" % (fname, d) __foundFonts.append(fname) break
def SearchFontPath(font_file): for dirname in rl_config.TTFSearchPath: for root, dirs, files in os.walk(os.path.abspath(dirname)): for file_name in files: filename = os.path.join(root, file_name) extension = os.path.splitext(filename)[1] if extension.lower() in ['.ttf']: if file_name==font_file: return True return False
d1347d0b0d62c13375232d1defd4b2cff6d87acd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d1347d0b0d62c13375232d1defd4b2cff6d87acd/customfonts.py
move = self.pool.get('stock.move').browse(cr, uid, context['active_id'], context=context) if 'product_id' in fields: res.update({'product_id': move.product_id.id}) if 'product_uom' in fields: res.update({'product_uom': move.product_uom.id}) if 'qty' in fields: res.update({'qty': move.product_qty}) if 'use_exist' in fields: res.update({'use_exist': (move.picking_id and move.picking_id.type=='out' and True) or False})
if context.get('active_id'): move = self.pool.get('stock.move').browse(cr, uid, context['active_id'], context=context) if 'product_id' in fields: res.update({'product_id': move.product_id.id}) if 'product_uom' in fields: res.update({'product_uom': move.product_uom.id}) if 'qty' in fields: res.update({'qty': move.product_qty}) if 'use_exist' in fields: res.update({'use_exist': (move.picking_id and move.picking_id.type=='out' and True) or False})
def default_get(self, cr, uid, fields, context=None): """ Get default values @param self: The object pointer. @param cr: A database cursor @param uid: ID of the user currently logged in @param fields: List of fields for default value @param context: A standard dictionary @return: Default values of fields """
446c9873772b04c9722155e6aba062001753c0aa /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/446c9873772b04c9722155e6aba062001753c0aa/stock_move.py
HOST = config.get('ftp_server_address', detect_ip_addr()) PORT = int(config.get('ftp_server_port', '8021')) address = (HOST, PORT) ftpd = ftpserver.FTPServer(address, ftpserver.FTPHandler)
ftpd = ftpserver.FTPServer((HOST, PORT), ftpserver.FTPHandler)
def run(self): autho = authorizer.authorizer() ftpserver.FTPHandler.authorizer = autho ftpserver.max_cons = 300 ftpserver.max_cons_per_ip = 50 ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs if PASSIVE_PORTS: ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS
e7a22755e5d44a4de0867e44e19e269af5af56f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/e7a22755e5d44a4de0867e44e19e269af5af56f9/__init__.py
ds = ftp_server() ds.start()
if HOST.lower() == 'none': netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Server FTP Not Started\n") else: netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Serving FTP on %s:%s\n" % (HOST, PORT)) ds = ftp_server() ds.start()
def run(self): autho = authorizer.authorizer() ftpserver.FTPHandler.authorizer = autho ftpserver.max_cons = 300 ftpserver.max_cons_per_ip = 50 ftpserver.FTPHandler.abstracted_fs = abstracted_fs.abstracted_fs if PASSIVE_PORTS: ftpserver.FTPHandler.passive_ports = PASSIVE_PORTS
e7a22755e5d44a4de0867e44e19e269af5af56f9 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/e7a22755e5d44a4de0867e44e19e269af5af56f9/__init__.py
cr.execute('select sum(nb_register) from event_registration where id IN %s', (tuple(reg_ids),)) number = cr.fetchone()
number = 0.0 if reg_ids: cr.execute('select sum(nb_register) from event_registration where id IN %s', (tuple(reg_ids),)) number = cr.fetchone()
def _get_register(self, cr, uid, ids, fields, args, context=None): """Get confirmed or unconfirmed register value. @param ids: List of Event registration type's id @param fields: List of function fields(register_current and register_prospect). @param context: A standard dictionary for contextual values @return: Dictionary of function fields value. """ register_pool = self.pool.get('event.registration') res = {} for event in self.browse(cr, uid, ids, context): res[event.id] = {} for field in fields: res[event.id][field] = False state = [] if 'register_current' in fields: state += ['open', 'done'] if 'register_prospect' in fields: state.append('draft')
39655a3578b18c3aac36322c556e4b06d5203a00 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/39655a3578b18c3aac36322c556e4b06d5203a00/event.py
res[event.id]['register_current'] = number and number[0] or 0.0
res[event.id]['register_current'] = number and number[0]
def _get_register(self, cr, uid, ids, fields, args, context=None): """Get confirmed or unconfirmed register value. @param ids: List of Event registration type's id @param fields: List of function fields(register_current and register_prospect). @param context: A standard dictionary for contextual values @return: Dictionary of function fields value. """ register_pool = self.pool.get('event.registration') res = {} for event in self.browse(cr, uid, ids, context): res[event.id] = {} for field in fields: res[event.id][field] = False state = [] if 'register_current' in fields: state += ['open', 'done'] if 'register_prospect' in fields: state.append('draft')
39655a3578b18c3aac36322c556e4b06d5203a00 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/39655a3578b18c3aac36322c556e4b06d5203a00/event.py
res[event.id]['register_prospect'] = number and number[0] or 0.0
res[event.id]['register_prospect'] = number and number[0]
def _get_register(self, cr, uid, ids, fields, args, context=None): """Get confirmed or unconfirmed register value. @param ids: List of Event registration type's id @param fields: List of function fields(register_current and register_prospect). @param context: A standard dictionary for contextual values @return: Dictionary of function fields value. """ register_pool = self.pool.get('event.registration') res = {} for event in self.browse(cr, uid, ids, context): res[event.id] = {} for field in fields: res[event.id][field] = False state = [] if 'register_current' in fields: state += ['open', 'done'] if 'register_prospect' in fields: state.append('draft')
39655a3578b18c3aac36322c556e4b06d5203a00 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/39655a3578b18c3aac36322c556e4b06d5203a00/event.py
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string').encode('utf8'))
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
def check_group(node): if node.get('groups'): groups = node.get('groups').split(',') access_pool = self.pool.get('ir.model.access') can_see = any(access_pool.check_groups(cr, user, group) for group in groups) if not can_see: node.set('invisible', '1') if 'attrs' in node.attrib: del(node.attrib['attrs']) #avoid making field visible later del(node.attrib['groups']) return can_see else: return True
80c4054ceb50018be0f374df918b01fc5378cf04 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/80c4054ceb50018be0f374df918b01fc5378cf04/orm.py
trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string').encode('utf8'))
trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
def check_group(node): if node.get('groups'): groups = node.get('groups').split(',') access_pool = self.pool.get('ir.model.access') can_see = any(access_pool.check_groups(cr, user, group) for group in groups) if not can_see: node.set('invisible', '1') if 'attrs' in node.attrib: del(node.attrib['attrs']) #avoid making field visible later del(node.attrib['groups']) return can_see else: return True
80c4054ceb50018be0f374df918b01fc5378cf04 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/80c4054ceb50018be0f374df918b01fc5378cf04/orm.py
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum').encode('utf8'))
trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('sum'))
def check_group(node): if node.get('groups'): groups = node.get('groups').split(',') access_pool = self.pool.get('ir.model.access') can_see = any(access_pool.check_groups(cr, user, group) for group in groups) if not can_see: node.set('invisible', '1') if 'attrs' in node.attrib: del(node.attrib['attrs']) #avoid making field visible later del(node.attrib['groups']) return can_see else: return True
80c4054ceb50018be0f374df918b01fc5378cf04 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/80c4054ceb50018be0f374df918b01fc5378cf04/orm.py
operator = 'in'
if ids2: operator = 'in'
def rg(ids, table, parent): if not ids: return [] ids2 = table.search(cr, uid, [(parent, 'in', ids)], context=context) return ids + rg(ids2, table, parent)
915f5b6db79579ac3f3c6a9d35c290b192f01ed3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/915f5b6db79579ac3f3c6a9d35c290b192f01ed3/expression.py
call_null = True operator = 'in'
if operator in ['like','ilike','in','=']: call_null = False self.__exp[i] = ('id','=',0) else: call_null = True operator = 'in'
def rg(ids, table, parent): if not ids: return [] ids2 = table.search(cr, uid, [(parent, 'in', ids)], context=context) return ids + rg(ids2, table, parent)
915f5b6db79579ac3f3c6a9d35c290b192f01ed3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/915f5b6db79579ac3f3c6a9d35c290b192f01ed3/expression.py
operator = 'in'
if res_ids: operator = 'in'
def _rec_convert(ids): if field_obj == table: return ids return self.__execute_recursive_in(cr, field._id1, field._rel, field._id2, ids, operator, field._type)
915f5b6db79579ac3f3c6a9d35c290b192f01ed3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/915f5b6db79579ac3f3c6a9d35c290b192f01ed3/expression.py
call_null_m2m = True operator = 'in'
if operator in ['like','ilike','in','=']: call_null_m2m = False self.__exp[i] = ('id','=',0) else: call_null_m2m = True operator = 'in'
def _rec_convert(ids): if field_obj == table: return ids return self.__execute_recursive_in(cr, field._id1, field._rel, field._id2, ids, operator, field._type)
915f5b6db79579ac3f3c6a9d35c290b192f01ed3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/915f5b6db79579ac3f3c6a9d35c290b192f01ed3/expression.py
def _rec_convert(ids): if field_obj == table: return ids return self.__execute_recursive_in(cr, field._id1, field._rel, field._id2, ids, operator, field._type)
915f5b6db79579ac3f3c6a9d35c290b192f01ed3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/915f5b6db79579ac3f3c6a9d35c290b192f01ed3/expression.py
for t in parse_func(d):
for t in parse_func(d.iter()):
field_def = objmodel._columns[field_name]
e1f829b0eba128971ce36f8774c9cfb5fbfeda8d /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/e1f829b0eba128971ce36f8774c9cfb5fbfeda8d/translate.py
if data['form']['period_from'] == False or data['form']['period_to'] == False:
if not data['form']['period_from'] or not data['form']['period_to']:
def _build_context(self, cr, uid, ids, data, context=None): if context is None: context = {} result = {} result['fiscalyear'] = data['form']['fiscalyear_id'] and data['form']['fiscalyear_id'] or False if data['form']['filter'] == 'filter_date': result['date_from'] = data['form']['date_from'] result['date_to'] = data['form']['date_to'] elif data['form']['filter'] == 'filter_period': if data['form']['period_from'] == False or data['form']['period_to'] == False: raise osv.except_osv(_('Error'),_('Select Start period and End period')) period_date_start = period_obj.read(cr, uid, data['form']['period_from'], ['date_start'])['date_start'] period_date_stop = period_obj.read(cr, uid, data['form']['period_to'], ['date_stop'])['date_stop'] cr.execute('SELECT id FROM account_period WHERE date_start >= %s AND date_stop <= %s', (period_date_start, period_date_stop)) result['periods'] = lambda x: x[0], cr.fetchall() return result
307d40f4b13c2f814468a460299a4412c9d501be /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/307d40f4b13c2f814468a460299a4412c9d501be/account_common_report.py
hr_evaluation_obj = self.pool.get('hr_evaluation.evaluation')
hr_evaluation_interview_obj = self.pool.get('hr.evaluation.interview')
def send_mail(self, cr, uid, ids, context=None): hr_evaluation_obj = self.pool.get('hr_evaluation.evaluation') if context is None: context = {} evaluation_data = self.read(cr, uid, ids, context=context)[0] for waiting_id in hr_evaluation_obj.browse(cr, uid, evaluation_data['evaluation_id'], context=context).survey_request_ids: if waiting_id.state == "waiting_answer" and waiting_id.user_to_review_id.work_email : msg = " Hello %s, \n\n Kindly post your response for %s survey. \n\n Thanks," %(waiting_id.user_to_review_id.name, waiting_id.survey_id.title) tools.email_send(tools.config['email_from'], [waiting_id.user_to_review_id.work_email],\ 'Reminder to fill up Survey', msg) return {'type': 'ir.actions.act_window_close'}
816dc0de90a3e970620d855ce35d9071ea23b358 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/816dc0de90a3e970620d855ce35d9071ea23b358/hr_evaluation_mail.py
for waiting_id in hr_evaluation_obj.browse(cr, uid, evaluation_data['evaluation_id'], context=context).survey_request_ids: if waiting_id.state == "waiting_answer" and waiting_id.user_to_review_id.work_email : msg = " Hello %s, \n\n Kindly post your response for %s survey. \n\n Thanks," %(waiting_id.user_to_review_id.name, waiting_id.survey_id.title) tools.email_send(tools.config['email_from'], [waiting_id.user_to_review_id.work_email],\
current_interview = hr_evaluation_interview_obj.browse(cr, uid, evaluation_data.get('evaluation_id')) if current_interview.state == "waiting_answer" and current_interview.user_to_review_id.work_email : msg = " Hello %s, \n\n Kindly post your response for '%s' survey interview. \n\n Thanks," %(current_interview.user_to_review_id.name, current_interview.survey_id.title) tools.email_send(tools.config['email_from'], [current_interview.user_to_review_id.work_email],\
def send_mail(self, cr, uid, ids, context=None): hr_evaluation_obj = self.pool.get('hr_evaluation.evaluation') if context is None: context = {} evaluation_data = self.read(cr, uid, ids, context=context)[0] for waiting_id in hr_evaluation_obj.browse(cr, uid, evaluation_data['evaluation_id'], context=context).survey_request_ids: if waiting_id.state == "waiting_answer" and waiting_id.user_to_review_id.work_email : msg = " Hello %s, \n\n Kindly post your response for %s survey. \n\n Thanks," %(waiting_id.user_to_review_id.name, waiting_id.survey_id.title) tools.email_send(tools.config['email_from'], [waiting_id.user_to_review_id.work_email],\ 'Reminder to fill up Survey', msg) return {'type': 'ir.actions.act_window_close'}
816dc0de90a3e970620d855ce35d9071ea23b358 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/816dc0de90a3e970620d855ce35d9071ea23b358/hr_evaluation_mail.py
def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs):
def load_module_graph(cr, graph, status=None, perform_checks=True, skip_cleanup=False, **kwargs): """Migrates+Updates or Installs all module nodes from ``graph`` :param graph: graph of module nodes to load :param status: status dictionary for keeping track of progress :param perform_checks: whether module descriptors should be checked for validity (prints warnings for some cases, and may even raise osv_except if the certificate is invalid) :param skip_cleanup: whether the auto-cleanup of records should be executed (unlinks any object that appears to be from one of the updated modules, but has not been loaded during the last loading (i.e. records that seem to have been removed from the module). This is best left disabled when loading stand-alone modules that could contain records from dependent modules (i.e. other modules have put records in their namespace) """
def load_module_graph(cr, graph, status=None, perform_checks=True, **kwargs): def process_sql_file(cr, fp): queries = fp.read().split(';') for query in queries: new_query = ' '.join(query.split()) if new_query: cr.execute(new_query) def load_init_update_xml(cr, m, idref, mode, kind): for filename in package.data.get('%s_xml' % kind, []): logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: loading %s' % (m, filename)) _, ext = os.path.splitext(filename) fp = tools.file_open(opj(m, filename)) if ext == '.csv': noupdate = (kind == 'init') tools.convert_csv_import(cr, m, os.path.basename(filename), fp.read(), idref, mode=mode, noupdate=noupdate) elif ext == '.sql': process_sql_file(cr, fp) elif ext == '.yml': tools.convert_yaml_import(cr, m, fp, idref, mode=mode, **kwargs) else: tools.convert_xml_import(cr, m, fp, idref, mode=mode, **kwargs) fp.close() def load_demo_xml(cr, m, idref, mode): for xml in package.data.get('demo_xml', []): name, ext = os.path.splitext(xml) logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: loading %s' % (m, xml)) fp = tools.file_open(opj(m, xml)) if ext == '.csv': tools.convert_csv_import(cr, m, os.path.basename(xml), fp.read(), idref, mode=mode, noupdate=True) elif ext == '.yml': tools.convert_yaml_import(cr, m, fp, idref, mode=mode, noupdate=True, **kwargs) else: tools.convert_xml_import(cr, m, fp, idref, mode=mode, noupdate=True, **kwargs) fp.close() def load_data(cr, module_name, id_map, mode): _load_data(cr, module_name, id_map, mode, 'data') def load_demo(cr, module_name, id_map, mode): _load_data(cr, module_name, id_map, mode, 'demo') def load_test(cr, module_name, id_map, mode): cr.commit() if not tools.config.options['test_disable']: try: _load_data(cr, module_name, id_map, mode, 'test') except Exception, e: logging.getLogger('test').exception('Tests failed to execute in %s module %s', module_name) finally: if tools.config.options['test_commit']: cr.commit() else: cr.rollback() def _load_data(cr, module_name, id_map, mode, kind): noupdate = (kind == 'demo') for filename in package.data.get(kind, []): _, ext = os.path.splitext(filename) log.info("module %s: loading %s", module_name, filename) pathname = os.path.join(module_name, filename) file = tools.file_open(pathname) # TODO manage .csv file with noupdate == (kind == 'init') if ext == '.sql': process_sql_file(cr, file) elif ext == '.csv': noupdate = (kind == 'init') tools.convert_csv_import(cr, module_name, pathname, file.read(), id_map, mode, noupdate) elif ext == '.yml': tools.convert_yaml_import(cr, module_name, file, id_map, mode, noupdate) else: tools.convert_xml_import(cr, module_name, file, id_map, mode, noupdate) file.close() # **kwargs is passed directly to convert_xml_import if not status: status = {} status = status.copy() package_todo = [] statusi = 0 pool = pooler.get_pool(cr.dbname) migrations = MigrationManager(cr, graph) has_updates = False modobj = None logger.notifyChannel('init', netsvc.LOG_DEBUG, 'loading %d packages..' 
% len(graph)) for package in graph: logger.notifyChannel('init', netsvc.LOG_INFO, 'module %s: loading objects' % package.name) migrations.migrate_module(package, 'pre') register_class(package.name) modules = pool.instanciate(package.name, cr) if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'): init_module_objects(cr, package.name, modules) cr.commit() for package in graph: status['progress'] = (float(statusi)+0.1) / len(graph) m = package.name mid = package.id if modobj is None: modobj = pool.get('ir.module.module') if modobj and perform_checks: modobj.check(cr, 1, [mid]) idref = {} status['progress'] = (float(statusi)+0.4) / len(graph) mode = 'update' if hasattr(package, 'init') or package.state == 'to install': mode = 'init' if hasattr(package, 'init') or hasattr(package, 'update') or package.state in ('to install', 'to upgrade'): has_updates = True for kind in ('init', 'update'): if package.state=='to upgrade': # upgrading the module information modobj.write(cr, 1, [mid], modobj.get_values_from_terp(package.data)) load_init_update_xml(cr, m, idref, mode, kind) load_data(cr, m, idref, mode) if hasattr(package, 'demo') or (package.dbdemo and package.state != 'installed'): status['progress'] = (float(statusi)+0.75) / len(graph) load_demo_xml(cr, m, idref, mode) load_demo(cr, m, idref, mode) cr.execute('update ir_module_module set demo=%s where id=%s', (True, mid)) # launch tests only in demo mode, as most tests will depend # on demo data. Other tests can be added into the regular # 'data' section, but should probably not alter the data, # as there is no rollback. load_test(cr, m, idref, mode) package_todo.append(package.name) migrations.migrate_module(package, 'post') if modobj: ver = release.major_version + '.' + package.data.get('version', '1.0') # Set new modules and dependencies modobj.write(cr, 1, [mid], {'state': 'installed', 'latest_version': ver}) cr.commit() # Update translations for all installed languages modobj.update_translations(cr, 1, [mid], None) cr.commit() package.state = 'installed' for kind in ('init', 'demo', 'update'): if hasattr(package, kind): delattr(package, kind) statusi += 1 cr.execute('select model from ir_model where state=%s', ('manual',)) for model in cr.dictfetchall(): pool.get('ir.model').instanciate(cr, 1, model['model'], {}) pool.get('ir.model.data')._process_end(cr, 1, package_todo) cr.commit() return has_updates
7bcccbafe585701724e4bc118ab9b9495145d08c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/7bcccbafe585701724e4bc118ab9b9495145d08c/__init__.py
pool.get('ir.model.data')._process_end(cr, 1, package_todo)
if not skip_cleanup: pool.get('ir.model.data')._process_end(cr, 1, package_todo)
def _load_data(cr, module_name, id_map, mode, kind): noupdate = (kind == 'demo') for filename in package.data.get(kind, []): _, ext = os.path.splitext(filename) log.info("module %s: loading %s", module_name, filename) pathname = os.path.join(module_name, filename) file = tools.file_open(pathname) # TODO manage .csv file with noupdate == (kind == 'init') if ext == '.sql': process_sql_file(cr, file) elif ext == '.csv': noupdate = (kind == 'init') tools.convert_csv_import(cr, module_name, pathname, file.read(), id_map, mode, noupdate) elif ext == '.yml': tools.convert_yaml_import(cr, module_name, file, id_map, mode, noupdate) else: tools.convert_xml_import(cr, module_name, file, id_map, mode, noupdate) file.close()
7bcccbafe585701724e4bc118ab9b9495145d08c /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/7bcccbafe585701724e4bc118ab9b9495145d08c/__init__.py
move.write(cr, uid, [move.id], {'state': 'done'})
move.write({'state': 'done'})
def test_finished(self, cr, uid, ids): """ Tests whether the move is in done or cancel state or not. @return: True or False """ move_ids = self.pool.get('stock.move').search(cr, uid, [('picking_id', 'in', ids)]) for move in self.pool.get('stock.move').browse(cr, uid, move_ids): if move.state not in ('done', 'cancel'): if move.product_qty != 0.0: return False else: move.write(cr, uid, [move.id], {'state': 'done'}) return True
4665c073c841c690ffe994b39a76105a717371c3 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/4665c073c841c690ffe994b39a76105a717371c3/stock.py
'description' : """ * Save the Thunderbird plug­in. \n * Follow the Steps to configure OpenERP in Thunderbird. \n -> 1. Go to Tools > OpenERP Synchronization. \n -> 2. check data (configured by default). \n -> 3. Click Test Connection. \n -> 4. A message appears with state of your connection. \n -> 5. If your connection failed, check if your database is open, and check your data. \n -> 6. If you have a good connection, click Ok and start to archive mail in OpenERP.
'description' : """ * Save the Thunderbird plug­in. \n * Follow the Steps to configure OpenERP in Thunderbird. \n -> 1. Go to Tools > OpenERP Synchronization. \n -> 2. check data (configured by default). \n -> 3. Click Test Connection. \n -> 4. A message appears with state of your connection. \n -> 5. If your connection failed, check if your database is open, and check your data. \n -> 6. If you have a good connection, click Ok and start to archive mail in OpenERP."""
def default_get(self, cr, uid, fields, context={}): data = super(thunderbird_installer, self).default_get(cr, uid, fields, context) pdf_file = open(config['addons_path'] + "/thunderbird/doc/Installation Guide to OpenERP Thunderbid Plug-in.pdf", 'r') data['pdf_file'] = base64.encodestring(pdf_file.read()) file = open(config['addons_path'] + "/thunderbird/plugin/OpenERP_plugin-2.0.xpi", 'r') data['plugin_file'] = base64.encodestring(file.read()) return data
ddc18f506e0553d64edb5647896a55443de810fd /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/ddc18f506e0553d64edb5647896a55443de810fd/installer.py
'action_id': fields.many2one('ir.actions.actions', 'Home Action'),
'action_id': fields.many2one('ir.actions.act_window', 'Action', required=True),
def generate_random_pass(): pass_chars = RANDOM_PASS_CHARACTERS[:] random.shuffle(pass_chars) return ''.join(pass_chars[0:10])
5a3d000f53c314c2371a67a107ab4f10b5f260ea /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5a3d000f53c314c2371a67a107ab4f10b5f260ea/wizard_share.py
def default_get(self, cr, uid, fields, context=None): """ To get default values for the object. """ res = super(share_create, self).default_get(cr, uid, fields, context=context) if not context: context={} action_id = context.get('action_id', False) domain = context.get('domain', '[]') if 'action_id' in fields: res['action_id'] = action_id if 'domain' in fields: res['domain'] = domain return res
def generate_random_pass(): pass_chars = RANDOM_PASS_CHARACTERS[:] random.shuffle(pass_chars) return ''.join(pass_chars[0:10])
5a3d000f53c314c2371a67a107ab4f10b5f260ea /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/5a3d000f53c314c2371a67a107ab4f10b5f260ea/wizard_share.py
fiscal_date_start = fiscal_obj.browse(cr, uid, [data['form']['fiscalyear_id']], context=context)[0].date_start result_initial_bal['empty_fy_allow'] = True result_initial_bal['fiscalyear'] = fiscal_obj.search(cr, uid, [('date_stop', '<', fiscal_date_start), ('state', '=', 'draft')], context=context) result_initial_bal['date_from'] = '0001-01-01' result_initial_bal['date_to'] = (datetime.datetime.strptime(fiscal_date_start, "%Y-%m-%d") + timedelta(days=-1)).strftime('%Y-%m-%d')
if data['form']['fiscalyear_id']: fiscal_date_start = fiscal_obj.browse(cr, uid, [data['form']['fiscalyear_id']], context=context)[0].date_start result_initial_bal['empty_fy_allow'] = True result_initial_bal['fiscalyear'] = fiscal_obj.search(cr, uid, [('date_stop', '<', fiscal_date_start), ('state', '=', 'draft')], context=context) result_initial_bal['date_from'] = '0001-01-01' result_initial_bal['date_to'] = (datetime.datetime.strptime(fiscal_date_start, "%Y-%m-%d") + timedelta(days=-1)).strftime('%Y-%m-%d')
def _build_contexts(self, cr, uid, ids, data, context=None): if context is None: context = {} result = {} period_obj = self.pool.get('account.period') fiscal_obj = self.pool.get('account.fiscalyear') result['fiscalyear'] = 'fiscalyear_id' in data['form'] and data['form']['fiscalyear_id'] or False result['journal_ids'] = 'journal_ids' in data['form'] and data['form']['journal_ids'] or False result['chart_account_id'] = 'chart_account_id' in data['form'] and data['form']['chart_account_id'] or False result_initial_bal = result.copy() if data['form']['filter'] == 'filter_date': result['date_from'] = data['form']['date_from'] result['date_to'] = data['form']['date_to'] result_initial_bal['date_from'] = '0001-01-01' result_initial_bal['date_to'] = (datetime.datetime.strptime(data['form']['date_from'], "%Y-%m-%d") + timedelta(days=-1)).strftime('%Y-%m-%d') elif data['form']['filter'] == 'filter_period': if not data['form']['period_from'] or not data['form']['period_to']: raise osv.except_osv(_('Error'),_('Select a starting and an ending period')) result['periods'] = self._build_periods(cr, uid, data['form']['period_from'], data['form']['period_to']) first_period = self.pool.get('account.period').search(cr, uid, [], order='date_start', limit=1)[0] result_initial_bal['periods'] = self._build_periods(cr, uid, first_period, data['form']['period_from']) else: fiscal_date_start = fiscal_obj.browse(cr, uid, [data['form']['fiscalyear_id']], context=context)[0].date_start result_initial_bal['empty_fy_allow'] = True #Improve me => there should be something generic in account.move.line -> query get result_initial_bal['fiscalyear'] = fiscal_obj.search(cr, uid, [('date_stop', '<', fiscal_date_start), ('state', '=', 'draft')], context=context) result_initial_bal['date_from'] = '0001-01-01' result_initial_bal['date_to'] = (datetime.datetime.strptime(fiscal_date_start, "%Y-%m-%d") + timedelta(days=-1)).strftime('%Y-%m-%d') return result, result_initial_bal
d2cbb8c382dda4184e8b927a00ab5c29208a17cf /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/d2cbb8c382dda4184e8b927a00ab5c29208a17cf/account_report_common.py
for vals in self.read(cr, uid, ids, context=context): user = vals.get('user_id')
for vals in self.browse(cr, uid, ids, context=context): user = vals.user_id
def do_accept(self, cr, uid, ids, context=None, *args): """ Update state which value is accepted. @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of calendar attendee’s IDs. @return: True """ if not context: context = {} for vals in self.read(cr, uid, ids, context=context): user = vals.get('user_id') if user: ref = vals.get('ref', None) if ref: if ref.user_id.id != user[0]: defaults = {'user_id': user[0]} new_event = model_obj.copy(cr, uid, event, default=defaults, context=context) self.write(cr, uid, ids, {'state': 'accepted'}, context) return True
3b8a7a9b445e5bfe8ee513b642e480e5b4f08b04 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3b8a7a9b445e5bfe8ee513b642e480e5b4f08b04/base_calendar.py
ref = vals.get('ref', None) if ref: if ref.user_id.id != user[0]: defaults = {'user_id': user[0]} new_event = model_obj.copy(cr, uid, event, default=defaults, context=context) self.write(cr, uid, ids, {'state': 'accepted'}, context)
mod_obj = self.pool.get(vals.ref._name) if vals.ref: if vals.ref.user_id.id != user.id: defaults = {'user_id': user.id} new_event = mod_obj.copy(cr, uid, vals.ref.id, default=defaults, context=context) self.write(cr, uid, vals.id, {'state': 'accepted'}, context)
def do_accept(self, cr, uid, ids, context=None, *args): """ Update state which value is accepted. @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of calendar attendee’s IDs. @return: True """ if not context: context = {} for vals in self.read(cr, uid, ids, context=context): user = vals.get('user_id') if user: ref = vals.get('ref', None) if ref: if ref.user_id.id != user[0]: defaults = {'user_id': user[0]} new_event = model_obj.copy(cr, uid, event, default=defaults, context=context) self.write(cr, uid, ids, {'state': 'accepted'}, context) return True
3b8a7a9b445e5bfe8ee513b642e480e5b4f08b04 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/3b8a7a9b445e5bfe8ee513b642e480e5b4f08b04/base_calendar.py
'priority': fields.selection(crm_report.AVAILABLE_PRIORITIES, 'Priority'),
'priority': fields.selection(AVAILABLE_PRIORITIES, 'Priority'),
def _get_data(self, cr, uid, ids, field_name, arg, context={}):
28c787901db8ae97b30d532da839c9036cdedc86 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/28c787901db8ae97b30d532da839c9036cdedc86/crm_claim_report.py
def set_context(self, objects, data, ids, report_type=None): PARTNER_REQUEST = '' if (data['model'] == 'res.partner'): ## Si on imprime depuis les partenaires if ids: #PARTNER_REQUEST = "AND line.partner_id IN (" + ','.join(map(str, ids)) + ")" PARTNER_REQUEST = "AND line.partner_id IN %s",(tuple(ids),) # Transformation des date # #
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
def set_context(self, objects, data, ids, report_type=None): PARTNER_REQUEST = '' if (data['model'] == 'res.partner'): ## Si on imprime depuis les partenaires if ids: #PARTNER_REQUEST = "AND line.partner_id IN (" + ','.join(map(str, ids)) + ")" PARTNER_REQUEST = "AND line.partner_id IN %s",(tuple(ids),) # Transformation des date # #
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
def set_context(self, objects, data, ids, report_type=None): PARTNER_REQUEST = '' if (data['model'] == 'res.partner'): ## Si on imprime depuis les partenaires if ids: #PARTNER_REQUEST = "AND line.partner_id IN (" + ','.join(map(str, ids)) + ")" PARTNER_REQUEST = "AND line.partner_id IN %s",(tuple(ids),) # Transformation des date # #
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
if self.date_lst and data['form']['initial_balance'] :
if data['form']['initial_balance']:
def set_context(self, objects, data, ids, report_type=None): PARTNER_REQUEST = '' if (data['model'] == 'res.partner'): ## Si on imprime depuis les partenaires if ids: #PARTNER_REQUEST = "AND line.partner_id IN (" + ','.join(map(str, ids)) + ")" PARTNER_REQUEST = "AND line.partner_id IN %s",(tuple(ids),) # Transformation des date # #
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
else:
def set_context(self, objects, data, ids, report_type=None): PARTNER_REQUEST = '' if (data['model'] == 'res.partner'): ## Si on imprime depuis les partenaires if ids: #PARTNER_REQUEST = "AND line.partner_id IN (" + ','.join(map(str, ids)) + ")" PARTNER_REQUEST = "AND line.partner_id IN %s",(tuple(ids),) # Transformation des date # #
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
def set_context(self, objects, data, ids, report_type=None): PARTNER_REQUEST = '' if (data['model'] == 'res.partner'): ## Si on imprime depuis les partenaires if ids: #PARTNER_REQUEST = "AND line.partner_id IN (" + ','.join(map(str, ids)) + ")" PARTNER_REQUEST = "AND line.partner_id IN %s",(tuple(ids),) # Transformation des date # #
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
def lines(self, partner,data): full_account = [] if data['form']['reconcil'] : RECONCILE_TAG = " " else: RECONCILE_TAG = "AND l.reconcile_id IS NULL"
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
if self.date_lst_string: self.cr.execute( "SELECT l.id,l.date,j.code, l.ref, l.name, l.debit, l.credit,l.amount_currency,c.code AS currency_code " \ "FROM account_move_line l " \ "LEFT JOIN account_journal j " \ "ON (l.journal_id = j.id) " \ "LEFT JOIN res_currency c on (l.currency_id=c.id)" \ "WHERE l.partner_id = %s " \ "AND l.account_id IN %s"\ "AND l.date IN (" + self.date_lst_string + ")" " " + RECONCILE_TAG + " "\ "ORDER BY l.id", (partner.id, tuple(self.account_ids),)) res = self.cr.dictfetchall() sum = 0.0 for r in res: sum = r['debit'] - r['credit'] r['progress'] = sum full_account.append(r)
self.cr.execute( "SELECT l.id,l.date,j.code, l.ref, l.name, l.debit, l.credit,l.amount_currency,c.code AS currency_code " \ "FROM account_move_line l " \ "LEFT JOIN account_journal j " \ "ON (l.journal_id = j.id) " \ "LEFT JOIN res_currency c on (l.currency_id=c.id)" \ "WHERE l.partner_id = %s " \ "AND l.account_id IN %s AND " + self.query +" " \ " " + RECONCILE_TAG + " "\ "ORDER BY l.id", (partner.id, tuple(self.account_ids))) res = self.cr.dictfetchall() sum = 0.0 for r in res: sum = r['debit'] - r['credit'] r['progress'] = sum full_account.append(r)
def lines(self, partner,data): full_account = [] if data['form']['reconcil'] : RECONCILE_TAG = " " else: RECONCILE_TAG = "AND l.reconcile_id IS NULL"
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py
if self.date_lst_string: self.cr.execute( "SELECT sum(debit) " \ "FROM account_move_line " \ "WHERE partner_id = %s " \ "AND account_id IN %s" \ " " + RECONCILE_TAG + " " \ "AND date IN (" + self.date_lst_string + ")" , (partner.id,tuple(self.account_ids),)) contemp = self.cr.fetchone() if contemp != None: result_tmp = contemp[0] or 0.0 else: result_tmp = result_tmp + 0.0
self.cr.execute( "SELECT sum(debit) " \ "FROM account_move_line AS l " \ "WHERE partner_id = %s " \ "AND account_id IN %s" \ " " + RECONCILE_TAG + " " \ "AND " + self.query + " " , (partner.id,tuple(self.account_ids),)) contemp = self.cr.fetchone() if contemp != None: result_tmp = contemp[0] or 0.0 else: result_tmp = result_tmp + 0.0
def _sum_debit_partner(self, partner, data): result_tmp = 0.0 if data['form']['reconcil'] : RECONCILE_TAG = " " else: RECONCILE_TAG = "AND reconcile_id IS NULL" if self.date_lst and data['form']['initial_balance'] : self.cr.execute( "SELECT sum(debit) " \ "FROM account_move_line " \ "WHERE partner_id = %s " \ "AND account_id IN %s" \ "AND reconcile_id IS NULL " \ "AND date < %s " , (partner.id, tuple(self.account_ids), self.date_lst[0],)) contemp = self.cr.fetchone() if contemp != None: result_tmp = contemp[0] or 0.0 else: result_tmp = result_tmp + 0.0
fbadca8d34f0137a629f314b5e51a2ecd4c769b8 /local1/tlutelli/issta_data/temp/all_python//python/2010_temp/2010/7397/fbadca8d34f0137a629f314b5e51a2ecd4c769b8/third_party_ledger.py