content (string column, lengths 0–1.55M)
<import_from_stmt>.cgc_type1_exploit CGCType1Exploit<import_from_stmt>.cgc_type2_exploit CGCType2Exploit<import_from_stmt>.cgc_exploit CGCExploit<import_from_stmt>.type1 CGCType1RopExploit CGCType1ShellcodeExploit CGCType1CircumstantialExploit<import_from_stmt>.type2 CGCType2RopExploit CGCType2ShellcodeExploit<line_sep>
"""Debiasing using reweighing"""<line_sep>""" This data recipe performs reweighing debiasing using the AIF360 package. https://github.com/Trusted-AI/AIF360 <NAME>., <NAME>. Data preprocessing techniques for classification without discrimination. Knowl Inf Syst 33, 1–33 (2012). https://doi.org/10.1007/s10115-011-0463-8 The transformer splits the original data as specified and returns training, validation, and test sets with weights added. 1. Update the folder_path and data_file variables to indicate the location of the dataset(s). 2. validation_test_files lists additional validation or test files that need to be updated with weights. 3. validation_split indicates the percentiles at which the original data should be split to create a validation and test set. If it's empty, no validation or test set is created. [0.7] would create a 70/30 training/validation split. [0.7, 0.9] would create a 70/20/10 training, validation, and test split. 4. target is the name of the target column. 5. favorable_label and unfavorable_label are the socially positive and negative target value respectively. 6. protected_group_info list of lists, where each sublist contains the name of a protected column, the unprivledged level, and the privleged level. Each of the protected columns must be binary. 7. From the Datasets section of driverless, click on ADD DATASET and then UPLOAD DATA RECIPE to upload this file. Be sure to use the specified validation set to be used for validation when a model is trained. The weights can cause leakage if the validation or test data is used for determining the weights. """<import_stmt>datatable<as>dt<import_stmt>numpy<as>np<import_stmt>os<import_from_stmt>h2oaicore.data CustomData<import_from_stmt>h2oaicore.systemutils config<class_stmt>MyReweightingData(CustomData)<block_start>_modules_needed_by_name=['datetime' 'fairlearn' 'aif360' 'sklearn']<line_sep>@staticmethod<def_stmt>create_data <block_start><import_stmt>pandas<as>pd<import_from_stmt>h2oaicore.models_utils import_tensorflow<line_sep>tf=import_tensorflow()<line_sep># above is because aif360 requires tensorflow <import_from_stmt>aif360.datasets BinaryLabelDataset<import_from_stmt>aif360.algorithms.preprocessing.reweighing Reweighing<line_sep>""" Update the below as needed """<line_sep>######### ######### ######### # Path to the data folder_path='tmp/'<line_sep># Data file data_file='housing_train_proc.csv'<line_sep>full_data_file=folder_path+data_file<if_stmt><not>os.path.isfile(full_data_file)# for testing, just return something <block_start><if_stmt>config.hard_asserts<block_start><return>dt.Frame(np.array([[1 2 3] [4 5 6]]))<block_end><else_stmt><block_start><return>[]<block_end><block_end>train=pd.read_csv(full_data_file)<line_sep>validation_test_files=['housing_test_proc.csv']<line_sep>validation_split=[0.6 0.8]<line_sep># Target column target='high_priced'<line_sep>favorable_label=0<line_sep>unfavorable_label=1<line_sep># Privleged_group_info = [[Protetected group name 1, prevleged level, unprivleged level], [Protetected group name 2, prevleged level, unprivleged level]] # The protected group columns need to be binary protected_group_info=[['hispanic' 0 1] ['black' 0 1]]<line_sep>######### ######### ######### # Set up protected group info protected_groups=[group_info[0]<for>group_info protected_group_info]<line_sep>dataset_orig=BinaryLabelDataset(df=train label_names=[target] favorable_label=favorable_label unfavorable_label=unfavorable_label 
protected_attribute_names=protected_groups)<line_sep>privileged_groups=[]<line_sep>unprivileged_groups=[]<for_stmt>protected_group protected_group_info<block_start>privileged_groups_dict={}<line_sep>unprivileged_groups_dict={}<line_sep>privileged_groups_dict[protected_group[0]]=protected_group[1]<line_sep>unprivileged_groups_dict[protected_group[0]]=protected_group[2]<line_sep>privileged_groups.append(privileged_groups_dict)<line_sep>unprivileged_groups.append(unprivileged_groups_dict)<block_end># Fit weights on the full dataset to be used on the external test set, if given RW_full=Reweighing(unprivileged_groups=unprivileged_groups privileged_groups=privileged_groups)<line_sep>RW_full.fit(dataset_orig)<line_sep># Split the original data into train, validation, and test if applicable <if_stmt>len(validation_split)<eq>1<block_start>dataset_orig_train,dataset_orig_valid=dataset_orig.split(validation_split shuffle=<true>)<block_end><elif_stmt>len(validation_split)<eq>2<block_start>dataset_orig_train_valid,dataset_orig_test=dataset_orig.split([validation_split[1]] shuffle=<true>)<line_sep># Fit the weights on both the validation and test set for the test set split RW_train_valid=Reweighing(unprivileged_groups=unprivileged_groups privileged_groups=privileged_groups)<line_sep>RW_train_valid.fit(dataset_orig_train_valid)<line_sep>dataset_orig_train,dataset_orig_valid=dataset_orig_train_valid.split([validation_split[0]/(validation_split[1])] shuffle=<true>)<block_end><else_stmt><block_start>dataset_orig_train=dataset_orig<block_end># Fit weights on the training set only RW=Reweighing(unprivileged_groups=unprivileged_groups privileged_groups=privileged_groups)<line_sep>RW.fit(dataset_orig_train)<line_sep>dataset_transf_train=RW.transform(dataset_orig_train)<line_sep># Add the weigts to the training set train_df=pd.DataFrame(dataset_transf_train.features columns=dataset_transf_train.feature_names)<line_sep>train_df[target]=dataset_transf_train.labels.ravel()<line_sep>train_df['weights']=dataset_transf_train.instance_weights.ravel()<line_sep># Create datasets with minimum features calculated the given number of days ahead dataset_dict={}<line_sep>dataset_dict[data_file.split('.')[0]+"_rw_train.csv"]=train_df<line_sep># Add weights to the validation split (if a validation split was specified) <if_stmt>len(validation_split)<ge>1<block_start>dataset_transf_valid=RW.transform(dataset_orig_valid)<line_sep>valid_df=pd.DataFrame(dataset_transf_valid.features columns=dataset_transf_valid.feature_names)<line_sep>valid_df[target]=dataset_transf_valid.labels.ravel()<line_sep>valid_df['weights']=dataset_transf_valid.instance_weights.ravel()<line_sep>dataset_dict[data_file.split('.')[0]+"_rw_validation.csv"]=valid_df<block_end># Add weights to the test split (if a test split was specified) <if_stmt>len(validation_split)<ge>2<block_start>dataset_transf_test=RW_train_valid.transform(dataset_orig_test)<line_sep>test_df=pd.DataFrame(dataset_transf_test.features columns=dataset_transf_test.feature_names)<line_sep>test_df[target]=dataset_transf_test.labels.ravel()<line_sep>test_df['weights']=dataset_transf_test.instance_weights.ravel()<line_sep>dataset_dict[data_file.split('.')[0]+"_rw_test.csv"]=test_df<block_end># Add weights to the test files (If provided) <for_stmt>valid_file validation_test_files<block_start>valid=pd.read_csv(folder_path+valid_file)<line_sep>dataset_valid_orig=BinaryLabelDataset(df=valid label_names=[target] favorable_label=favorable_label unfavorable_label=unfavorable_label 
protected_attribute_names=protected_groups)<line_sep>dataset_transf_valid=RW_full.transform(dataset_valid_orig)<line_sep>valid_df=pd.DataFrame(dataset_transf_valid.features columns=dataset_transf_valid.feature_names)<line_sep>valid_df[target]=dataset_transf_valid.labels.ravel()<line_sep>valid_df['weights']=dataset_transf_valid.instance_weights.ravel()<line_sep>dataset_dict[valid_file.split('.')[0]+"_rw_transformed.csv"]=valid_df<block_end><return>dataset_dict<block_end><block_end>
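The recipe above relies on AIF360's Reweighing preprocessor. For reference, a minimal stand-alone sketch of the same transform on a toy pandas frame (column names and labels here are illustrative, not taken from the recipe):

import pandas as pd
from aif360.datasets import BinaryLabelDataset
from aif360.algorithms.preprocessing.reweighing import Reweighing

# Toy data: 'sex' is the binary protected column, 'label' the binary target.
df = pd.DataFrame({
    "sex":   [0, 0, 1, 1, 0, 1],
    "x1":    [1.0, 2.0, 0.5, 1.5, 3.0, 2.5],
    "label": [1, 0, 1, 1, 0, 0],
})

dataset = BinaryLabelDataset(df=df, label_names=["label"],
                             protected_attribute_names=["sex"],
                             favorable_label=1, unfavorable_label=0)

# Reweighing computes one weight per (group, label) combination so that
# group membership and outcome become statistically independent.
rw = Reweighing(unprivileged_groups=[{"sex": 0}],
                privileged_groups=[{"sex": 1}])
rw.fit(dataset)
weighted = rw.transform(dataset)
print(weighted.instance_weights)   # one weight per row, ready to pass to a learner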
<import_stmt>pytest<import_stmt>importlib<line_sep>@pytest.fixture(scope='module' params=['ipynb.fs.defs.pure_ipynb.foo' 'ipynb.fs.defs.mixed_ipynb.foo'])<def_stmt>foo request<block_start><return>importlib.import_module(request.param)<block_end>@pytest.fixture(scope='module' params=['ipynb.fs.defs.pure_ipynb' 'ipynb.fs.defs.mixed_ipynb'])<def_stmt>init request<block_start><return>importlib.import_module(request.param)<block_end><def_stmt>test_execute foo<block_start><assert_stmt>foo.foo()<eq>'foo'<line_sep>rawr=foo.RAWR()<assert_stmt>rawr.rawr()<eq>'rawr'<block_end><def_stmt>test_no_execute foo<block_start><assert_stmt><not>hasattr(foo 'bar')<assert_stmt><not>hasattr(foo 'r')<block_end><def_stmt>test_allcaps_execute foo<block_start><assert_stmt>foo.WAT<eq>'boo'<block_end><def_stmt>test_all init<block_start>r=init.RAWR()<assert_stmt>r.rawr()<eq>'rawr'<block_end><def_stmt>test_bogus_ipynb <block_start><with_stmt>pytest.raises(ImportError)<block_start><import_stmt>ipynb.fs.defs.bogus_ipynb<as>bogus_ipynb<block_end><block_end><def_stmt>test_r_notebook <block_start><with_stmt>pytest.raises(ImportError)<block_start><import_stmt>ipynb.fs.defs.r_notebook<block_end><block_end><def_stmt>test_nbformat_2 <block_start><with_stmt>pytest.raises(ImportError)<block_start><import_stmt>ipynb.fs.defs.older_nbformat<block_end><block_end>
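The fixtures above exercise the ipynb package's import hooks. Assuming a notebook named foo.ipynb sits inside the importing package, the defs variant used here pulls in only definitions (functions, classes, ALL-CAPS constants) and skips every other cell:

from ipynb.fs.defs.foo import foo, RAWR   # 'foo' notebook name is illustrative

print(foo())           # 'foo'
print(RAWR().rawr())   # 'rawr'
# ipynb.fs.full.foo would instead execute every cell of the notebook.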
""" ################################################################################################## # Copyright Info : Copyright (c) Davar Lab @ Hikvision Research Institute. All rights reserved. # Filename : __init__.py # Abstract : # Current Version: 1.0.0 # Date : 2021-05-20 ################################################################################################## """<import_from_stmt>.builder POSTPROCESS build_postprocess<import_from_stmt>.evaluation DavarDistEvalHook DavarEvalHook<line_sep>__all__=['POSTPROCESS' 'build_postprocess' "DavarEvalHook" "DavarDistEvalHook" ]<line_sep>
# <NAME> # Copyright (c) 2018 - 2019 AXeL <import_stmt>gi<line_sep>gi.require_version('Gtk' '3.0')<import_from_stmt>gi.repository Gtk Gdk Pango<import_from_stmt>lib.tools fit_position_to_destination<import_from_stmt>lib.parser parse_color<import_stmt>math<class_stmt>CustomComboBox(Gtk.ComboBoxText)<block_start><def_stmt>__init__ self data_list=[] sort=<false><block_start>Gtk.ComboBoxText.__init__(self)<line_sep># set max chars width <for_stmt>renderer self.get_cells()<block_start>renderer.props.max_width_chars=10<line_sep>renderer.props.ellipsize=Pango.EllipsizeMode.END<block_end># append data self.append_list(data_list sort)<block_end><def_stmt>append_list self data_list sort=<false> clear=<false># clear combobox <block_start><if_stmt>clear<block_start>self.remove_all()<block_end># sort data <if_stmt>sort<block_start>data_list=sorted(data_list)<block_end># append data <for_stmt>text data_list<block_start>self.append_text(text)<block_end><block_end><def_stmt>sync_with_combo self combo use_contains=<false><block_start><if_stmt>self.get_active()<ne>-1<and>combo.get_active()<ne>-1# do not allow same text at same time <block_start>self_text=self.get_active_text()<line_sep>combo_text=combo.get_active_text()<if_stmt>(use_contains<and>(self_text<in>combo_text<or>combo_text<in>self_text))<or>self_text<eq>combo_text<block_start>combo.set_active(-1)<block_end><block_end><block_end><block_end><class_stmt>TextValueComboBox(Gtk.ComboBox)<block_start><def_stmt>__init__ self data_list=[] model=<none> text_key=<none> value_key=<none> sort_key=<none><block_start>Gtk.ComboBox.__init__(self)<line_sep># set max chars width renderer_text=Gtk.CellRendererText()<line_sep>renderer_text.props.max_width_chars=10<line_sep>renderer_text.props.ellipsize=Pango.EllipsizeMode.END<line_sep># append data <if_stmt>model<is><none><block_start>self.model=Gtk.ListStore(str str)<block_end><else_stmt><block_start>self.model=model<block_end>self.append_list(data_list text_key value_key sort_key)<line_sep>self.set_model(self.model)<line_sep>self.pack_start(renderer_text <true>)<line_sep>self.add_attribute(renderer_text 'text' 0)<line_sep># save data list values (for further use) self.values=[item[value_key]<for>item data_list]<block_end><def_stmt>append_list self data_list text_key value_key sort_key=<none> clear=<false># clear combobox <block_start><if_stmt>clear<block_start>self.remove_all()<block_end># sort data <if_stmt>sort_key<is><not><none><block_start>data_list=sorted(data_list key=<lambda>item:item[sort_key])<block_end># append data <if_stmt>text_key<is><not><none><and>value_key<is><not><none><block_start><for_stmt>data data_list<block_start>self.model.append([data[text_key] data[value_key]])<block_end><block_end><block_end><def_stmt>_get_active self index<block_start>active=self.get_active()<if_stmt>active<ne>-1<block_start><return>self.model[active][index]<block_end><else_stmt><block_start><return><none><block_end><block_end><def_stmt>get_active_text self<block_start><return>self._get_active(0)<block_end><def_stmt>get_active_value self<block_start><return>self._get_active(1)<block_end><def_stmt>set_active_value self value<block_start>self.set_active(self.values.index(value))<block_end><def_stmt>remove_all self<block_start>self.model.clear()<block_end><block_end><class_stmt>CustomTreeView(Gtk.Frame)<block_start><def_stmt>__init__ self model 
columns<block_start>Gtk.Frame.__init__(self)<line_sep>self.perform_scroll=<false><line_sep>self.model=model<line_sep>self.columns=columns<line_sep>self.vbox=Gtk.Box(orientation=Gtk.Orientation.VERTICAL)<line_sep>self.add(self.vbox)<line_sep>## ScrolledWindow scrolled_window=Gtk.ScrolledWindow()<line_sep>self.vbox.pack_start(scrolled_window <true> <true> 0)<line_sep># TreeView self.tree_view=Gtk.TreeView(model)<line_sep>scrolled_window.add(self.tree_view)<for_stmt>column columns<block_start>self.tree_view.append_column(column)<block_end>self.tree_view.connect('size-allocate' self.scroll_tree_view)<line_sep>self.selection=self.tree_view.get_selection()<line_sep>self.selection.set_mode(Gtk.SelectionMode.SINGLE)<block_end><def_stmt>is_empty self<block_start><return>len(self.model)<eq>0<block_end><def_stmt>connect self event_name event_callback<block_start><if_stmt>event_name<eq>'selection-changed'<block_start>self.selection.connect('changed' event_callback)<block_end><else_stmt><block_start>self.tree_view.connect(event_name event_callback)<block_end><block_end><def_stmt>append_row self row select=<true> scroll_to=<true># append row <block_start>self.model.append(row)<line_sep># scroll to row <if_stmt>scroll_to<block_start>self.perform_scroll=<true><block_end># select row <if_stmt>select<block_start>index=len(self.model)-1<line_sep>self.select_row(index)<block_end><block_end><def_stmt>select_row self index<block_start>path=Gtk.TreePath(index)<line_sep>self.selection.select_path(path)<block_end><def_stmt>get_row_index self row_iter<block_start><return>self.model.get_path(row_iter).get_indices()[0]<block_end><def_stmt>get_rows_count self<block_start><return>len(self.model)<block_end><def_stmt>get_selected_row self<block_start>model,tree_iter=self.selection.get_selected()<if_stmt>tree_iter<block_start>row=[]<for_stmt>i range(len(self.columns))<block_start>column_value=model.get_value(tree_iter i)<line_sep>row.append(column_value)<block_end><return>row<block_end><else_stmt><block_start><return><none><block_end><block_end><def_stmt>remove_selected_row self# remove selected row <block_start>model,tree_iter=self.selection.get_selected()<if_stmt>tree_iter<block_start>model.remove(tree_iter)<block_end><block_end><def_stmt>scroll_tree_view self widget event<block_start><if_stmt>self.perform_scroll<block_start>adj=widget.get_vadjustment()<line_sep>adj.set_value(adj.get_upper()-adj.get_page_size())<line_sep>self.perform_scroll=<false><block_end><block_end><block_end><class_stmt>CustomListBox(Gtk.Frame)<block_start><def_stmt>__init__ self parent=<none> allow_moving=<true><block_start>Gtk.Frame.__init__(self)<line_sep>self.parent=parent<line_sep>self.allow_moving=allow_moving<line_sep>self.perform_scroll=<false><line_sep>self.add_callback=<none><line_sep>self.delete_callback=<none><line_sep>self.activate_callback=<none><line_sep>## ListBox vbox=Gtk.Box(orientation=Gtk.Orientation.VERTICAL)<line_sep>self.add(vbox)<line_sep>self.listbox=Gtk.ListBox()<line_sep>self.listbox.set_selection_mode(Gtk.SelectionMode.SINGLE)<line_sep>self.listbox.connect('size-allocate' self.on_size_allocate)<line_sep>self.listbox.connect('row-activated' self.on_row_activated)<line_sep>scrolled_window=Gtk.ScrolledWindow()<line_sep>scrolled_window.add(self.listbox)<line_sep>vbox.pack_start(scrolled_window <true> <true> 0)<line_sep>## ActionBar actionbar=Gtk.ActionBar()<line_sep>vbox.pack_end(actionbar <false> <false> 
0)<line_sep>default_buttons_box=ButtonBox(linked=<true>)<line_sep>actionbar.pack_start(default_buttons_box)<if_stmt>allow_moving# Move up <block_start>self.move_up_button=Gtk.Button()<line_sep>self.move_up_button.set_tooltip_text('Move up')<line_sep>self.move_up_button.set_image(Gtk.Image(icon_name='go-up-symbolic'))<line_sep>self.move_up_button.connect('clicked' self.on_move_up_button_clicked)<line_sep>default_buttons_box.add(self.move_up_button)<line_sep># Move down self.move_down_button=Gtk.Button()<line_sep>self.move_down_button.set_tooltip_text('Move down')<line_sep>self.move_down_button.set_image(Gtk.Image(icon_name='go-down-symbolic'))<line_sep>self.move_down_button.connect('clicked' self.on_move_down_button_clicked)<line_sep>default_buttons_box.add(self.move_down_button)<block_end># Delete self.delete_button=Gtk.Button()<line_sep>self.delete_button.set_tooltip_text('Delete')<line_sep>self.delete_button.set_image(Gtk.Image(icon_name='edit-delete-symbolic'))<line_sep>self.delete_button.connect('clicked' self.on_delete_button_clicked)<line_sep>default_buttons_box.add(self.delete_button)<line_sep># Clear all self.clear_all_button=Gtk.Button()<line_sep>self.clear_all_button.set_tooltip_text('Clear all')<line_sep>self.clear_all_button.set_image(Gtk.Image(icon_name='edit-clear-all-symbolic'))<line_sep>self.clear_all_button.connect('clicked' self.on_clear_all_button_clicked)<line_sep>default_buttons_box.add(self.clear_all_button)<line_sep># Initialise default buttons status self.reset_buttons()<line_sep># Buttons box self.buttons_box=ButtonBox(linked=<true>)<line_sep>actionbar.pack_end(self.buttons_box)<block_end><def_stmt>on_add self callback<block_start>self.add_callback=callback<block_end><def_stmt>on_delete self callback<block_start>self.delete_callback=callback<block_end><def_stmt>on_activate self callback<block_start>self.activate_callback=callback<block_end><def_stmt>on_row_activated self listbox row<block_start><if_stmt>self.allow_moving<block_start>rows_count=len(self.get_rows())<line_sep>index=row.get_index()<line_sep># Move up enable_move_up=<true><if>index<g>0<else><false><line_sep>self.move_up_button.set_sensitive(enable_move_up)<line_sep># Move down enable_move_down=<true><if>index<l>rows_count-1<else><false><line_sep>self.move_down_button.set_sensitive(enable_move_down)<block_end># Delete self.delete_button.set_sensitive(<true>)<line_sep># Clear all self.clear_all_button.set_sensitive(<true>)<line_sep># Invoke activate callback <if_stmt>self.activate_callback<is><not><none><block_start>self.activate_callback()<block_end><block_end><def_stmt>on_size_allocate self listbox event<block_start><if_stmt>self.perform_scroll<block_start>adj=listbox.get_adjustment()<line_sep>adj.set_value(adj.get_upper()-adj.get_page_size())<line_sep>self.perform_scroll=<false><block_end><block_end><def_stmt>add_button self button<block_start>self.buttons_box.add(button)<block_end><def_stmt>append_text self text# add new row <block_start>row=Gtk.ListBoxRow()<line_sep>label=Gtk.Label(text xalign=0 margin=5)<line_sep>row.add(label)<line_sep>self.listbox.add(row)<line_sep>self.listbox.show_all()<line_sep>self.perform_scroll=<true><line_sep>self.select_row(row)<if_stmt>self.add_callback<is><not><none><block_start>self.add_callback()<block_end><block_end><def_stmt>select_row self row<block_start>self.listbox.select_row(row)<line_sep>self.on_row_activated(self.listbox row)<block_end><def_stmt>get_rows self<block_start><return>self.listbox.get_children()<block_end><def_stmt>is_empty 
self<block_start><return>len(self.get_rows())<eq>0<block_end><def_stmt>get_row_text self row<block_start>label=row.get_children()[0]<line_sep><return>label.get_text()<block_end><def_stmt>reset_buttons self<block_start><if_stmt>self.allow_moving<block_start>self.move_up_button.set_sensitive(<false>)<line_sep>self.move_down_button.set_sensitive(<false>)<block_end>self.delete_button.set_sensitive(<false>)<if_stmt>self.is_empty()<block_start>self.clear_all_button.set_sensitive(<false>)<block_end><block_end><def_stmt>remove_row self row reset=<true><block_start>row_index=row.get_index()<line_sep>self.listbox.remove(row)<if_stmt>reset<block_start>self.reset_buttons()<block_end><if_stmt>self.delete_callback<is><not><none><block_start>self.delete_callback(row_index)<block_end><block_end><def_stmt>on_delete_button_clicked self button<block_start>row=self.listbox.get_selected_row()<line_sep>self.remove_row(row)<block_end><def_stmt>move_row self row new_index<block_start>self.listbox.select_row(<none>)# remove selection self.listbox.remove(row)<line_sep>self.listbox.insert(row new_index)<line_sep>self.select_row(row)<block_end><def_stmt>on_move_up_button_clicked self button<block_start>row=self.listbox.get_selected_row()<if_stmt>row<block_start>index=row.get_index()<line_sep>self.move_row(row index-1)<block_end><block_end><def_stmt>on_move_down_button_clicked self button<block_start>row=self.listbox.get_selected_row()<if_stmt>row<block_start>index=row.get_index()<line_sep>self.move_row(row index+1)<block_end><block_end><def_stmt>clear self<block_start><for_stmt>row self.get_rows()<block_start>self.remove_row(row <false>)<block_end>self.reset_buttons()<block_end><def_stmt>on_clear_all_button_clicked self button<block_start>dialog=Gtk.MessageDialog(text='Confirm clear all?' 
transient_for=self.parent buttons=Gtk.ButtonsType.OK_CANCEL message_type=Gtk.MessageType.QUESTION)<line_sep>response=dialog.run()<line_sep>dialog.destroy()<line_sep># We only clear when the user presses the OK button <if_stmt>response<eq>Gtk.ResponseType.OK<block_start>self.clear()<block_end><block_end><block_end><class_stmt>SpinButton(Gtk.SpinButton)<block_start><def_stmt>__init__ self min=0 max=100 value=0 step=1 page_step=5<block_start>adjustment=Gtk.Adjustment(value=value lower=min upper=max step_increment=step page_increment=page_step page_size=0)<line_sep>Gtk.SpinButton.__init__(self adjustment=adjustment)<block_end><block_end><class_stmt>ImageLabel(Gtk.Box)<block_start><def_stmt>__init__ self image text padding=0<block_start>Gtk.Box.__init__(self orientation=Gtk.Orientation.HORIZONTAL spacing=3)<line_sep>self.set_border_width(padding)<line_sep>self.add(image)<line_sep>self.label=Gtk.Label(text ellipsize=Pango.EllipsizeMode.END)<line_sep>self.label.set_tooltip_text(text)<line_sep>self.add(self.label)<block_end><def_stmt>get_text self<block_start><return>self.label.get_text()<block_end><block_end><class_stmt>StackListBox(Gtk.Box)<block_start><def_stmt>__init__ self<block_start>Gtk.Box.__init__(self orientation=Gtk.Orientation.HORIZONTAL spacing=5)<line_sep>self.count=0<line_sep>frame=Gtk.Frame()<line_sep>scrolled_window=Gtk.ScrolledWindow(hscrollbar_policy=Gtk.PolicyType.NEVER)<line_sep>self.listbox=Gtk.ListBox()<line_sep>self.listbox.set_selection_mode(Gtk.SelectionMode.SINGLE)<line_sep>self.listbox.connect('row-activated' self.on_row_activated)<line_sep>scrolled_window.add(self.listbox)<line_sep>frame.add(scrolled_window)<line_sep>self.pack_start(frame <true> <true> 0)<line_sep>self.stack=Gtk.Stack()<line_sep>self.pack_end(self.stack <false> <false> 0)<block_end><def_stmt>on_row_activated self listbox row<block_start>name=row.get_children()[0].get_text()<line_sep>self.stack.set_visible_child_name(name)<block_end><def_stmt>append self label widget# add listbox label <block_start>self.listbox.add(label)<if_stmt>self.count<eq>0# select first row <block_start>self.listbox.select_row(self.listbox.get_row_at_index(self.count))<block_end># add stack widget self.stack.add_named(widget label.get_text())<line_sep>self.count<augadd>1<block_end><block_end><class_stmt>ButtonBox(Gtk.Box)<block_start><def_stmt>__init__ self orientation=Gtk.Orientation.HORIZONTAL spacing=5 centered=<false> linked=<false><block_start>Gtk.Box.__init__(self orientation=orientation)<line_sep>self.buttons_container=Gtk.Box(orientation=orientation)<line_sep>self.orientation=orientation<line_sep>self.linked=linked<line_sep># set centered <if_stmt>centered<block_start>self.pack_start(self.buttons_container <true> <false> 0)<block_end><else_stmt><block_start>self.pack_start(self.buttons_container <false> <false> 0)<block_end># set linked <if_stmt>linked<block_start>Gtk.StyleContext.add_class(self.buttons_container.get_style_context() Gtk.STYLE_CLASS_LINKED)<block_end><else_stmt><block_start>self.buttons_container.set_spacing(spacing)<block_end><block_end><def_stmt>add self button<block_start><if_stmt>self.orientation<eq>Gtk.Orientation.VERTICAL<and><not>self.linked<block_start>hbox=Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)<line_sep>hbox.pack_start(button <true> <false> 0)<line_sep>self.buttons_container.add(hbox)<block_end><else_stmt><block_start>self.buttons_container.add(button)<block_end><block_end><block_end><class_stmt>MessageBox(Gtk.Box)<block_start><def_stmt>__init__ self text=<none> color='black' 
enable_buttons=<false><block_start>Gtk.Box.__init__(self orientation=Gtk.Orientation.HORIZONTAL spacing=5)<line_sep>self.enable_buttons=enable_buttons<line_sep># label self.label=Gtk.Label(text)<line_sep>self.label.modify_fg(Gtk.StateType.NORMAL Gdk.color_parse(color))<line_sep>self.add(self.label)<line_sep># question buttons <if_stmt>enable_buttons<block_start>self.button_box=ButtonBox(linked=<true>)<line_sep>self.pack_end(self.button_box <false> <false> 0)<line_sep># yes self.yes_button=Gtk.Button()<line_sep>self.yes_button.set_tooltip_text('Yes')<line_sep>self.yes_button.set_image(Gtk.Image(icon_name='emblem-ok-symbolic'))<line_sep>self.button_box.add(self.yes_button)<line_sep># no self.no_button=Gtk.Button()<line_sep>self.no_button.set_tooltip_text('No')<line_sep>self.no_button.set_image(Gtk.Image(icon_name='window-close-symbolic'))<line_sep>self.button_box.add(self.no_button)<block_end><block_end><def_stmt>print_message self text is_question=<false><block_start>self.label.set_text(text)<if_stmt>self.enable_buttons<block_start><if_stmt>is_question<block_start>self.button_box.show()<block_end><else_stmt><block_start>self.button_box.hide()<block_end><block_end>self.show()<block_end><block_end><class_stmt>MenuButton(Gtk.Button)<block_start><def_stmt>__init__ self text=<none> position=Gtk.PositionType.BOTTOM icon_name=<none> padding=2<block_start>Gtk.Button.__init__(self text)<if_stmt>icon_name<is><not><none><block_start>self.set_image(Gtk.Image(icon_name=icon_name))<block_end>self.connect('clicked' self.on_clicked)<line_sep># popover self.popover=Gtk.Popover(relative_to=self position=position)<line_sep>self.popover.set_border_width(padding)<block_end><def_stmt>on_clicked self button<block_start>self.popover.show_all()<block_end><def_stmt>add self widget<block_start>self.popover.add(widget)<block_end><block_end><class_stmt>MenuImage(Gtk.EventBox)<block_start><def_stmt>__init__ self icon_name='pan-down-symbolic' pixel_size=13 position=Gtk.PositionType.BOTTOM padding=2<block_start>Gtk.EventBox.__init__(self)<line_sep>self.add(Gtk.Image(icon_name=icon_name pixel_size=pixel_size))<line_sep>self.connect('button-press-event' self.on_button_press)<line_sep>self.connect('enter-notify-event' self.on_enter_notify)<line_sep># popover self.popover=Gtk.Popover(relative_to=self position=position)<line_sep>self.popover.set_border_width(padding)<block_end><def_stmt>on_button_press self widget event<block_start>self.popover.show_all()<block_end><def_stmt>on_enter_notify self widget event<block_start>window=self.get_window()<line_sep>window.set_cursor(Gdk.Cursor(Gdk.CursorType.HAND1))<block_end><def_stmt>set_widget self widget<block_start>self.popover.add(widget)<block_end><block_end><class_stmt>FileChooserButton(Gtk.FileChooserButton)<block_start><def_stmt>__init__ self title filter=<none><block_start>Gtk.FileChooserButton.__init__(self title=title)<if_stmt>filter<is><not><none><and>len(filter)<g>1<block_start>name,pattern=filter<line_sep>file_filter=Gtk.FileFilter()<line_sep>file_filter.set_name('%s (%s)'%(name pattern))<line_sep>file_filter.add_pattern(pattern)<line_sep>self.add_filter(file_filter)<block_end><block_end><block_end><class_stmt>MiniMap(Gtk.Frame)<block_start>point_colors={'Monster':'red' 'Resource':'green' 'NPC':'blue' 'None':'black'}<def_stmt>__init__ self background_color='#CECECE' show_grid=<true> grid_color='#DDDDDD' grid_size=(15 15) 
point_radius=3<block_start>Gtk.Frame.__init__(self)<line_sep>self.points=[]<line_sep>self.point_opacity=0.7<line_sep>self.point_radius=point_radius<line_sep>self.show_grid=show_grid<line_sep>self.grid_color=grid_color<line_sep>self.grid_size=grid_size<line_sep>self.background_color=background_color<line_sep>self.use_origin_colors=<false><line_sep>self.add_borders=<false><line_sep>self.drawing_area=Gtk.DrawingArea()<line_sep>self.drawing_area.set_has_tooltip(<true>)<line_sep>self.drawing_area.connect('draw' self.on_draw)<line_sep>self.drawing_area.connect('query-tooltip' self.on_query_tooltip)<line_sep>self.add(self.drawing_area)<block_end><def_stmt>set_use_origin_colors self value<block_start>self.use_origin_colors=value<if_stmt>self.points<block_start>self.drawing_area.queue_draw()<block_end><block_end><def_stmt>set_add_borders self value<block_start>self.add_borders=value<if_stmt>self.points<block_start>self.drawing_area.queue_draw()<block_end><block_end><def_stmt>get_color_key self<block_start><return>'origin_color'<if>self.use_origin_colors<else>'color'<block_end><def_stmt>add_point self point name=<none> color=<none> redraw=<true># set point coordinates <block_start>new_point={'x':point['x'] 'y':point['y'] 'width':point['width'] 'height':point['height']}<line_sep># set point name <if_stmt>name<is><not><none><block_start>new_point['name']=name<block_end><elif_stmt>'name'<in>point<block_start>new_point['name']=point['name']<block_end><else_stmt><block_start>new_point['name']=<none><block_end># set point color new_point['color']=color<line_sep>new_point['origin_color']=parse_color(point['color'] as_hex=<true>)<if>'color'<in>point<else><none><line_sep># add point self.points.append(new_point)<if_stmt>redraw<block_start>self.drawing_area.queue_draw()<block_end><block_end><def_stmt>add_points self points name=<none> color=<none><block_start><for_stmt>point points<block_start>self.add_point(point name color <false>)<block_end>self.drawing_area.queue_draw()<block_end><def_stmt>remove_point self index<block_start><if_stmt>0<le>index<l>len(self.points)<block_start><del_stmt>self.points[index]<line_sep>self.drawing_area.queue_draw()<block_end><block_end><def_stmt>clear self<block_start><if_stmt>self.points<block_start>self.points=[]<line_sep>self.drawing_area.queue_draw()<block_end><block_end><def_stmt>on_draw self widget cr<block_start>drawing_area=widget.get_allocation()<line_sep>square_width,square_height=self.grid_size<line_sep>cr.set_line_width(1)<line_sep># set color function <def_stmt>set_color value opacity=1.0<block_start>color=Gdk.color_parse(value)<line_sep>cr.set_source_rgba(float(color.red)/65535 float(color.green)/65535 float(color.blue)/65535 opacity)<block_end># fill background with color <if_stmt>self.background_color<block_start>cr.rectangle(0 0 drawing_area.width drawing_area.height)<line_sep>set_color(self.background_color)<line_sep>cr.fill()<block_end># draw grid lines <if_stmt>self.show_grid<block_start>set_color(self.grid_color)<line_sep># draw vertical lines <for_stmt>x range(square_width drawing_area.width square_width+1)# +1 for line width <block_start>cr.move_to(x+0.5 0)# +0.5 for smooth line cr.line_to(x+0.5 drawing_area.height)<block_end># draw horizontal lines <for_stmt>y range(square_height drawing_area.height square_height+1)<block_start>cr.move_to(0 y+0.5)<line_sep>cr.line_to(drawing_area.width y+0.5)<block_end>cr.stroke()<block_end># draw points <for_stmt>point self.points# fit point to drawing area (should keep here, because it's useful when drawing area get 
resized) <block_start>x,y=fit_position_to_destination(point['x'] point['y'] point['width'] point['height'] drawing_area.width drawing_area.height)<if_stmt>self.add_borders<block_start>set_color('black')<block_end>cr.arc(x y self.point_radius 0 2<times>math.pi)<if_stmt>self.add_borders<block_start>cr.stroke_preserve()<block_end>color_key=self.get_color_key()<line_sep>color=self.point_colors['None']<if>point[color_key]<is><none><else>point[color_key]<line_sep>set_color(color self.point_opacity)<line_sep>cr.fill()<block_end><block_end><def_stmt>get_tooltip_widget self point# on draw function <block_start><def_stmt>on_draw widget cr<block_start>cr.set_line_width(1)<line_sep># draw point color_key=self.get_color_key()<line_sep>color=Gdk.color_parse(point[color_key])<line_sep>cr.set_source_rgba(float(color.red)/65535 float(color.green)/65535 float(color.blue)/65535 self.point_opacity)<line_sep>cr.arc(self.point_radius self.point_radius self.point_radius 0 2<times>math.pi)<line_sep>cr.fill()<block_end># tooltip widget <if_stmt>point['name']<is><not><none><block_start>widget=Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL spacing=3)<line_sep>color_key=self.get_color_key()<if_stmt>point[color_key]<is><not><none><block_start>drawing_area=Gtk.DrawingArea()<line_sep>point_diameter=self.point_radius<times>2<line_sep>drawing_area.set_size_request(point_diameter point_diameter)<line_sep>drawing_area.connect('draw' on_draw)<line_sep>box=Gtk.Box(orientation=Gtk.Orientation.VERTICAL)<line_sep>box.pack_start(drawing_area <true> <false> 0)<line_sep>widget.add(box)<block_end>widget.add(Gtk.Label(point['name']))<line_sep>widget.show_all()<block_end><else_stmt><block_start>widget=<none><block_end><return>widget<block_end><def_stmt>on_query_tooltip self widget x y keyboard_mode tooltip<block_start>drawing_area=self.drawing_area.get_allocation()<line_sep>tooltip_widget=<none><line_sep># check if a point is hovered <for_stmt>point self.points# fit point to drawing area <block_start>point_x,point_y=fit_position_to_destination(point['x'] point['y'] point['width'] point['height'] drawing_area.width drawing_area.height)<line_sep># TODO: the check below should be circular, not rectangular <if_stmt>point_x-self.point_radius<le>x<le>point_x+self.point_radius<and>point_y-self.point_radius<le>y<le>point_y+self.point_radius<block_start>tooltip_widget=self.get_tooltip_widget(point)<line_sep><break><block_end><block_end># if so <if_stmt>tooltip_widget<is><not><none># set tooltip widget <block_start>tooltip.set_custom(tooltip_widget)<line_sep># show the tooltip <return><true><block_end><else_stmt><block_start><return><false><block_end><block_end><block_end>
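As a quick illustration of the widgets defined above, a minimal window wiring up a CustomComboBox and a SpinButton could look like this (sketch only; it assumes the module above is importable as widgets):

import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from widgets import CustomComboBox, SpinButton   # hypothetical module name

win = Gtk.Window(title='Demo')
box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=5)

combo = CustomComboBox(['banana', 'apple', 'cherry'], sort=True)  # sorted entries
spin = SpinButton(min=0, max=10, value=3)                         # bounded spinner
box.add(combo)
box.add(spin)

win.add(box)
win.connect('destroy', Gtk.main_quit)
win.show_all()
Gtk.main()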
""" Copyright (c) 2018-2021 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """<import_stmt>numpy<as>np<import_stmt>cv2<import_from_stmt>.format_converter DirectoryBasedAnnotationConverter ConverterReturn<import_from_stmt>..utils UnsupportedPackage<import_from_stmt>..representation CharacterRecognitionAnnotation<import_from_stmt>..config BoolField<try_stmt><block_start><import_stmt>lmdb<block_end><except_stmt>ImportError<as>import_error<block_start>lmdb=UnsupportedPackage("lmdb" import_error.msg)<block_end><class_stmt>LMDBConverter(DirectoryBasedAnnotationConverter)<block_start>__provider__='lmdb_text_recognition_database'<line_sep>annotation_types=(CharacterRecognitionAnnotation )<line_sep>supported_symbols='0123456789abcdefghijklmnopqrstuvwxyz'<line_sep>@classmethod<def_stmt>parameters cls<block_start>configuration_parameters=super().parameters()<line_sep>configuration_parameters.update({'lower_case':BoolField(description='Convert GT text to lowercase.' optional=<true>)})<line_sep><return>configuration_parameters<block_end><def_stmt>configure self<block_start>super().configure()<line_sep>self.lower_case=self.get_value_from_config('lower_case')<block_end><def_stmt>convert self check_content=<false> progress_callback=<none> progress_interval=100 **kwargs<block_start>"""Reads data from disk and returns dataset in converted for AC format Args: check_content (bool, optional): Check if content is valid. Defaults to False. progress_callback (bool, optional): Display progress. Defaults to None. progress_interval (int, optional): Units to display progress. Defaults to 100 (percent). Returns: [type]: Converted dataset """<line_sep>annotations=[]<line_sep>content_errors=<none><if><not>check_content<else>[]<line_sep>lmdb_env=lmdb.open(bytes(self.data_dir) readonly=<true>)<with_stmt>lmdb_env.begin(write=<false>)<as>txn<block_start>num_iterations=int(txn.get('num-samples'.encode()))<for_stmt>index range(1 num_iterations+1)<block_start>label_key=f'label-{index:09d}'.encode()<line_sep>text=txn.get(label_key).decode('utf-8')<if_stmt>self.lower_case<block_start>text=text.lower()<block_end><if_stmt>progress_callback<is><not><none><and>index%progress_interval<eq>0<block_start>progress_callback(index/num_iterations<times>100)<block_end><if_stmt>check_content<block_start>img_key=f'label-{index:09d}'.encode()<line_sep>image_bytes=txn.get(img_key)<line_sep>image=cv2.imdecode(np.frombuffer(image_bytes np.uint8) cv2.IMREAD_ANYCOLOR)<if_stmt>image<is><none><block_start>content_errors.append(f'label-{index:09d}: does not exist')<block_end><block_end>annotations.append(CharacterRecognitionAnnotation(index text))<block_end><block_end>label_map={ind:str(key)<for>ind,key enumerate(self.supported_symbols)}<line_sep>meta={'label_map':label_map 'blank_label':len(label_map)}<line_sep><return>ConverterReturn(annotations meta content_errors)<block_end><block_end>
"""A setuptools based setup module. See: https://packaging.python.org/en/latest/distributing.html https://github.com/pypa/sampleproject """<line_sep># Always prefer setuptools over distutils <import_from_stmt>setuptools setup find_packages<line_sep>setup(package_dir={"":"src"} packages=find_packages(where="src") )<line_sep>
<import_stmt>sys<import_from_stmt>typing Dict Optional SupportsFloat<import_stmt>pendulum# type: ignore <import_from_stmt>pathlib Path<import_from_stmt>organize.utils DotDict<import_from_stmt>.filter Filter<class_stmt>Created(Filter)<block_start>""" Matches files by created date :param int years: specify number of years :param int months: specify number of months :param float weeks: specify number of weeks :param float days: specify number of days :param float hours: specify number of hours :param float minutes: specify number of minutes :param float seconds: specify number of seconds :param str mode: either 'older' or 'newer'. 'older' matches all files created before the given time, 'newer' matches all files created within the given time. (default = 'older') :param str timezone: specify timezone :returns: - ``{created.year}`` -- the year the file was created - ``{created.month}`` -- the month the file was created - ``{created.day}`` -- the day the file was created - ``{created.hour}`` -- the hour the file was created - ``{created.minute}`` -- the minute the file was created - ``{created.second}`` -- the second the file was created Examples: - Show all files on your desktop created at least 10 days ago: .. code-block:: yaml :caption: config.yaml rules: - folders: '~/Desktop' filters: - created: days: 10 actions: - echo: 'Was created at least 10 days ago' - Show all files on your desktop which were created within the last 5 hours: .. code-block:: yaml :caption: config.yaml rules: - folders: '~/Desktop' filters: - created: hours: 5 mode: newer actions: - echo: 'Was created within the last 5 hours' - Sort pdfs by year of creation: .. code-block:: yaml :caption: config.yaml rules: - folders: '~/Documents' filters: - extension: pdf - created actions: - move: '~/Documents/PDF/{created.year}/' - Use specific timezone when processing files .. 
code-block:: yaml :caption: config.yaml rules: - folders: '~/Documents' filters: - extension: pdf - created: timezone: "Europe/Moscow" actions: - move: '~/Documents/PDF/{created.day}/{created.hour}/' """<def_stmt>__init__ self years=0 months=0 weeks=0 days=0 hours=0 minutes=0 seconds=0 mode="older" timezone=pendulum.tz.local_timezone() <arrow><none><block_start>self._mode=mode.strip().lower()<if_stmt>self._mode<not><in>("older" "newer")<block_start><raise>ValueError("Unknown option for 'mode': must be 'older' or 'newer'.")<block_end>self.is_older=self._mode<eq>"older"<line_sep>self.timezone=timezone<line_sep>self.timedelta=pendulum.duration(years=years months=months weeks=weeks days=days hours=hours minutes=minutes seconds=seconds )<line_sep>print(bool(self.timedelta))<block_end><def_stmt>pipeline self args:DotDict<arrow>Optional[Dict[str pendulum.DateTime]]<block_start>created_date=self._created(args.path)<line_sep># Pendulum bug: https://github.com/sdispater/pendulum/issues/387 # in_words() is a workaround: total_seconds() returns 0 if years are given <if_stmt>self.timedelta.in_words()<block_start>is_past=(created_date+self.timedelta).is_past()<line_sep>match=self.is_older<eq>is_past<block_end><else_stmt><block_start>match=<true><block_end><if_stmt>match<block_start><return>{"created":created_date}<block_end><return><none><block_end><def_stmt>_created self path:Path<arrow>pendulum.DateTime# see https://stackoverflow.com/a/39501288/300783 <block_start>stat=path.stat()<line_sep>time=0# type: SupportsFloat <if_stmt>sys.platform.startswith("win")<block_start>time=stat.st_ctime<block_end><else_stmt><block_start><try_stmt><block_start>time=stat.st_birthtime<block_end><except_stmt>AttributeError# We're probably on Linux. No easy way to get creation dates here, # so we'll settle for when its content was last modified. <block_start>time=stat.st_mtime<block_end><block_end><return>pendulum.from_timestamp(float(time) tz=self.timezone)<block_end><def_stmt>__str__ self<block_start><return>"[Created] All files %s than %s"%(self._mode self.timedelta.in_words() )<block_end><block_end>
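The _created helper above encodes the usual cross-platform handling of file creation times: st_ctime on Windows, st_birthtime on macOS, and a fall-back to the modification time where st_birthtime is unavailable (typically Linux). Stripped of the filter machinery, the core is roughly:

import sys
from pathlib import Path
import pendulum

def creation_time(path: Path) -> pendulum.DateTime:
    stat = path.stat()
    if sys.platform.startswith("win"):
        ts = stat.st_ctime                                # creation time on Windows
    else:
        # macOS exposes st_birthtime; Linux usually does not, so fall back
        # to the last modification time.
        ts = getattr(stat, "st_birthtime", stat.st_mtime)
    return pendulum.from_timestamp(float(ts), tz=pendulum.tz.local_timezone())

# print(creation_time(Path("config.yaml")))               # file name is illustrative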
<import_stmt>unittest<import_from_stmt>unittest.mock patch<import_from_stmt>bibliopixel.util.image extract_gif_lines<class_stmt>ExtractGifLinesTest(unittest.TestCase)<block_start><def_stmt>test_extract self<block_start>actual=list(extract_gif_lines._extract(GIF_LINES))<line_sep>self.assertEqual(actual EXPECTED1)<block_end><def_stmt>test_extract_gif_lines self<block_start>actual=list(extract_gif_lines.extract_gif_lines(GIF_LINES))<line_sep>self.assertEqual(actual EXPECTED2)<block_end><def_stmt>test_errors self<block_start>actual=list(extract_gif_lines.extract_gif_lines(BAD_LINES))<line_sep>self.assertEqual(actual EXPECTED2)<block_end><block_end>GIF_LINES=""" # Here's some stuff. # now code .. code-block:: yaml math.frog(23) print('glog') # But there's no GIF file. # More code: .. code-block:: yaml animation: BiblioPixelAnimations.matrix.MatrixRain shape: [2, 2] .. code-block:: yaml animation: BiblioPixelAnimations.matrix.MatrixRain shape: [32, 32] .. image:: https://raw.githubusercontent.com/ManiacalLabs/DocsFiles/master/\ BiblioPixel/doc/bibliopixel/animations/something.gif .. code-block:: yaml animation: .split shape: 128 .. image:: https://raw.githubusercontent.com/ManiacalLabs/DocsFiles/master/\ BiblioPixel/doc/bibliopixel/animations/minimal.gif """.splitlines()<line_sep>BAD_LINES=GIF_LINES+""" .. code-block:: json }}} ... image: blah.gif """.splitlines()<line_sep>YAML_LINES_1="""\ animation: BiblioPixelAnimations.matrix.MatrixRain shape: [32, 32] """.splitlines()<line_sep>YAML_LINES_2="""\ animation: .split shape: 128 """.splitlines()<line_sep>EXPECTED1=[('doc/bibliopixel/animations/something.gif' YAML_LINES_1) ('doc/bibliopixel/animations/minimal.gif' YAML_LINES_2)]<line_sep>DATA1={'animation':'BiblioPixelAnimations.matrix.MatrixRain' 'shape':[32 32]}<line_sep>DATA2={'animation':'.split' 'shape':128}<line_sep>EXPECTED2=[('doc/bibliopixel/animations/something.gif' DATA1) ('doc/bibliopixel/animations/minimal.gif' DATA2)]<line_sep>
# Test script for loading Oxts data and convert to Mercator coordinate <import_stmt>os<import_from_stmt>data loadPoses<import_from_stmt>utils postprocessPoses<import_from_stmt>convertPoseToOxts convertPoseToOxts<if_stmt>__name__<eq>"__main__"# root dir of KITTI-360 <block_start><if_stmt>'KITTI360_DATASET'<in>os.environ<block_start>kitti360_dir=os.environ['KITTI360_DATASET']<block_end><else_stmt><block_start>kitti360_dir=os.path.join(os.path.dirname(os.path.realpath(__file__)) '..' '..')<block_end># load poses seq_id=0<line_sep>pose_file=os.path.join(kitti360_dir 'data_poses' '2013_05_28_drive_%04d_sync'%seq_id 'poses.txt')<if_stmt><not>os.path.isfile(pose_file)<block_start><raise>ValueError('%s does not exist! \nPlease specify KITTI360_DATASET in your system path.\nPlease check if you have downloaded system poses (data_poses.zip) and unzipped them under KITTI360_DATASET'%pose_file)<block_end>[ts poses]=loadPoses(pose_file)<line_sep>print('Loaded pose file %s'%pose_file)<line_sep># convert coordinate system from # x=forward, y=left, z=up # to # x=forward, y=right, z=down poses=postprocessPoses(poses)<line_sep># convert to lat/lon coordinate oxts=convertPoseToOxts(poses)<line_sep># write to file output_dir='output'<if_stmt><not>os.path.isdir(output_dir)<block_start>os.makedirs(output_dir)<block_end>output_file='%s/2013_05_28_drive_%04d_sync_pose2oxts.txt'%(output_dir seq_id)<with_stmt>open(output_file 'w')<as>f<block_start><for_stmt>oxts_ oxts<block_start>oxts_=' '.join(['%.6f'%x<for>x oxts_])<line_sep>f.write('%s\n'%oxts_)<block_end><block_end>print('Output written to %s'%output_file)<block_end>
# -*- coding: utf-8 -*- """ pygments.lexers.agile ~~~~~~~~~~~~~~~~~~~~~ Just export lexer classes previously contained in this module. :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """<import_from_stmt>.lisp SchemeLexer<import_from_stmt>.jvm IokeLexer ClojureLexer<import_from_stmt>.python PythonLexer PythonConsoleLexer PythonTracebackLexer Python3Lexer Python3TracebackLexer DgLexer<import_from_stmt>.ruby RubyLexer RubyConsoleLexer FancyLexer<import_from_stmt>.perl PerlLexer Perl6Lexer<import_from_stmt>.d CrocLexer MiniDLexer<import_from_stmt>.iolang IoLexer<import_from_stmt>.tcl TclLexer<import_from_stmt>.factor FactorLexer<import_from_stmt>.scripting LuaLexer MoonScriptLexer<line_sep>__all__=[]<line_sep>
<import_from_future_stmt> division<import_stmt>tensorflow<as>tf<import_stmt>speech_model<class_stmt>MultiSpeechModel(object)<block_start><def_stmt>__init__ self num_gpus<block_start>self.num_gpus=num_gpus<line_sep>self._init_inference=<false><line_sep>self._init_cost=<false><line_sep>self._init_train=<false><block_end><def_stmt>init_inference self config<block_start>batch_size=config['batch_size']<assert_stmt>batch_size%self.num_gpus<eq>0 "Batch size must be divisible by the number of GPUs."<line_sep>batch_per_gpu=batch_size<floordiv>self.num_gpus<line_sep>self._models=[]<for_stmt>i range(self.num_gpus)<block_start><with_stmt>tf.device('/gpu:{}'.format(i))<block_start>model=speech_model.SpeechModel()<line_sep>config['batch_size']=batch_per_gpu<line_sep>model.init_inference(config)<line_sep>tf.get_variable_scope().reuse_variables()<line_sep>self._models.append(model)<block_end><block_end>self._init_inference=<true><block_end><def_stmt>init_cost self<block_start><assert_stmt>self._init_inference "Must init inference before cost."<for_stmt>i range(self.num_gpus)<block_start><with_stmt>tf.device('/gpu:{}'.format(i))<block_start><with_stmt>tf.variable_scope(tf.get_variable_scope() reuse=<true>)<block_start>self._models[i].init_cost()<block_end><block_end><block_end>costs=[model.cost<for>model self._models]<line_sep>zero=tf.constant(0.0)<line_sep>finite_costs=[tf.where(tf.is_finite(c) c zero)<for>c costs]<line_sep>self._cost=tf.div(tf.add_n(finite_costs) self.num_gpus)<line_sep>self._init_cost=<true><block_end><def_stmt>init_train self config<block_start><assert_stmt>self._init_inference "Must init inference before train."<assert_stmt>self._init_cost "Must init cost before train."<line_sep>learning_rate=config['learning_rate']<line_sep>self._momentum_val=config['momentum']<line_sep>max_grad_norm=config['max_grad_norm']<line_sep>decay_steps=config['lr_decay_steps']<line_sep>decay_rate=config['lr_decay_rate']<line_sep>self._momentum=tf.Variable(0.5 trainable=<false>)<line_sep>self._global_step=step=tf.Variable(0 trainable=<false>)<line_sep>self.lr=tf.train.exponential_decay(learning_rate step decay_steps decay_rate staircase=<true>)<with_stmt>tf.variable_scope(tf.get_variable_scope() reuse=tf.AUTO_REUSE)<block_start>ema=tf.train.ExponentialMovingAverage(0.99 name="avg")<line_sep>avg_cost_op=ema.apply([self.cost])<line_sep>self._avg_cost=ema.average(self.cost)<block_end>grads=[]<for_stmt>i range(self.num_gpus)<block_start><with_stmt>tf.device('/gpu:{}'.format(i))<block_start>tvars=tf.trainable_variables()<line_sep>grads.append(tf.gradients(self._models[i].cost tvars))<block_end><block_end>average_grads=_average_gradients(grads)<line_sep>scaled_grads,norm=tf.clip_by_global_norm(average_grads max_grad_norm)<line_sep>self._grad_norm=norm<with_stmt>tf.variable_scope(tf.get_variable_scope() reuse=tf.AUTO_REUSE)<block_start>optimizer=tf.train.MomentumOptimizer(self.lr self._momentum)<with_stmt>tf.control_dependencies([avg_cost_op])<block_start>self._train_op=optimizer.apply_gradients(zip(scaled_grads tvars) global_step=step)<block_end><block_end>self._init_train=<true><block_end><def_stmt>feed_dict self inputs labels=<none><block_start>""" Constructs the feed dictionary from given inputs necessary to run an operations for the model. Args: inputs : List of 2D numpy array input spectrograms. Should be of shape [input_dim x time] labels : List of labels for each item in the batch. Each label should be a list of integers. If label=None does not feed the label placeholder (for e.g. inference only). 
Returns: A dictionary of placeholder keys and feed values. """<line_sep>feed_dict={}<line_sep>batches=_split_batch(self.num_gpus inputs labels)<for_stmt>model,(i l) zip(self._models batches)<block_start>feed_dict.update(model.feed_dict(i labels=l))<block_end><return>feed_dict<block_end><def_stmt>start_momentum self session<block_start>m=self._momentum.assign(self._momentum_val)<line_sep>session.run([m])<block_end><def_stmt>set_mean_std self mean std session<block_start>self._models[0].set_mean_std(mean std session)<block_end>@property<def_stmt>cost self<block_start><assert_stmt>self._init_cost "Must init cost."<line_sep><return>self._cost<block_end>@property<def_stmt>avg_cost self<block_start><assert_stmt>self._init_train "Must init train."<line_sep><return>self._avg_cost<block_end>@property<def_stmt>grad_norm self<block_start><assert_stmt>self._init_train "Must init train."<line_sep><return>self._grad_norm<block_end>@property<def_stmt>global_step self<block_start><assert_stmt>self._init_train "Must init train."<line_sep><return>self._global_step<block_end>@property<def_stmt>input_dim self<block_start><assert_stmt>self._init_inference "Must init inference."<line_sep><return>self._models[0].input_dim<block_end>@property<def_stmt>output_dim self<block_start><assert_stmt>self._init_inference "Must init inference."<line_sep><return>self._models[0].output_dim<block_end>@property<def_stmt>train_op self<block_start><assert_stmt>self._init_train "Must init train."<line_sep><return>self._train_op<block_end><block_end><def_stmt>_average_gradients model_grads<block_start>""" Calculate the average gradient for each shared variable across all towers. Note that this function provides a synchronization point across all towers. Args: tower_grads: List of list of gradients for each model. Returns: List of gradients where each gradient has been averaged across all models. """<line_sep>average_grads=[]<for_stmt>grads zip(*model_grads)<block_start>grads=[tf.expand_dims(g 0)<for>g grads]<line_sep># Average over the 'model' dimension. grad=tf.concat(grads axis=0)<line_sep>grad=tf.reduce_mean(grad 0)<line_sep>average_grads.append(grad)<block_end><return>average_grads<block_end><def_stmt>_split_batch num_gpus data labels=<none><block_start>""" Split a set of data into batch_size // num_gpus batches. Args: inputs : List of 2D numpy array input spectrograms. Should be of shape [input_dim x time] labels : List of labels for each item in the batch. Each label should be a list of integers. If labels=None the corresponding labels item for each batch will also be None. Returns: A num_gpus length list of (inputs, labels) of the same types as above but with batch_size // num_gpus entries in each. """<line_sep>batch_size=len(data)<line_sep>n=batch_size<floordiv>num_gpus<line_sep>batches=[]<for_stmt>i range(0 batch_size n)<block_start>batch=[data[i:i+n] <none>]<if_stmt>labels<block_start>batch[1]=labels[i:i+n]<block_end>batches.append(batch)<block_end><return>batches<block_end>
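The _average_gradients helper groups the same variable's gradient across towers and averages over the tower axis. An equivalent sketch using tf.stack instead of expand_dims/concat:

import tensorflow as tf

def average_gradients(model_grads):
    """model_grads: one list of per-variable gradients per GPU."""
    averaged = []
    for grads in zip(*model_grads):            # same variable across all towers
        stacked = tf.stack(grads, axis=0)      # shape [num_gpus, ...]
        averaged.append(tf.reduce_mean(stacked, axis=0))
    return averaged

# Two towers, one variable each: the average of 2.0 and 4.0 is 3.0.
avg = average_gradients([[tf.constant(2.0)], [tf.constant(4.0)]])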
<import_stmt>simulation_null<as>sn<import_from_stmt>util runparams<import_stmt>mesh.patch<as>patch<import_stmt>mesh.boundary<as>bnd<class_stmt>TestSimulation(object)<block_start>@classmethod<def_stmt>setup_class cls<block_start>""" this is run once for each class before any tests """<line_sep><pass><block_end>@classmethod<def_stmt>teardown_class cls<block_start>""" this is run once for each class after all tests """<line_sep><pass><block_end><def_stmt>setup_method self<block_start>""" this is run before each test """<line_sep>self.rp=runparams.RuntimeParameters()<line_sep>self.rp.params["driver.tmax"]=1.0<line_sep>self.rp.params["driver.max_steps"]=100<line_sep>self.rp.params["driver.init_tstep_factor"]=0.5<line_sep>self.rp.params["driver.max_dt_change"]=1.2<line_sep>self.rp.params["driver.fix_dt"]=-1.0<line_sep>self.sim=sn.NullSimulation("test" "test" self.rp)<line_sep>myg=patch.Grid2d(8 16)<line_sep>myd=patch.CellCenterData2d(myg)<line_sep>bc=bnd.BC()<line_sep>myd.register_var("a" bc)<line_sep>myd.create()<line_sep>self.sim.cc_data=myd<block_end><def_stmt>teardown_method self<block_start>""" this is run after each test """<line_sep>self.rp=<none><line_sep>self.sim=<none><block_end><def_stmt>test_finished_n self<block_start>self.sim.n=1000<assert_stmt>self.sim.finished()<block_end><def_stmt>test_finished_t self<block_start>self.sim.cc_data.t=2.0<assert_stmt>self.sim.finished()<block_end><def_stmt>test_compute_timestep self# set a dt and n = 0, then init_tstep_factor should kick in <block_start>self.sim.dt=2.0<line_sep>self.sim.n=0<line_sep>self.sim.compute_timestep()<assert_stmt>self.sim.dt<eq>1.0<line_sep># now set dt_old and a new dt and see if the max_dt_change kicks in self.sim.n=1.0<line_sep>self.sim.dt_old=1.0<line_sep>self.sim.dt=2.0<line_sep>self.sim.compute_timestep()<assert_stmt>self.sim.dt<eq>1.2<line_sep># now test what happens if we go over tmax self.sim.cc_data.t=0.75<line_sep>self.dt=0.5<line_sep>self.sim.compute_timestep()<assert_stmt>self.sim.dt<eq>0.25<block_end><block_end><def_stmt>test_grid_setup <block_start>rp=runparams.RuntimeParameters()<line_sep>rp.params["mesh.nx"]=8<line_sep>rp.params["mesh.ny"]=16<line_sep>rp.params["mesh.xmin"]=0.0<line_sep>rp.params["mesh.xmax"]=1.0<line_sep>rp.params["mesh.ymin"]=0.0<line_sep>rp.params["mesh.ymax"]=2.0<line_sep>g=sn.grid_setup(rp)<assert_stmt>g.nx<eq>8<assert_stmt>g.ny<eq>16<assert_stmt>g.dx<eq>1.0/8<assert_stmt>g.dy<eq>1.0/8<block_end>
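The assertions above pin down the timestep policy the driver is expected to follow: scale the very first step by init_tstep_factor, cap growth at max_dt_change per step, and never overshoot tmax. A plain-Python reconstruction of that policy (not the pyro source itself):

def compute_timestep(dt, dt_old, n, t, tmax,
                     init_tstep_factor=0.5, max_dt_change=1.2):
    if n == 0:
        dt = init_tstep_factor * dt            # take only part of the first step
    elif dt_old is not None:
        dt = min(dt, max_dt_change * dt_old)   # limit growth between steps
    return min(dt, tmax - t)                   # never step past the final time

assert compute_timestep(2.0, None, 0, 0.0, 1.0) == 1.0     # 0.5 * 2.0
assert compute_timestep(2.0, 1.0, 1, 0.0, 10.0) == 1.2     # growth capped
assert compute_timestep(0.5, None, 1, 0.75, 1.0) == 0.25   # capped at tmax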
<import_stmt>os<import_stmt>KratosMultiphysics<as>Kratos<import_from_stmt>KratosMultiphysics Logger<line_sep>Logger.GetDefaultOutput().SetSeverity(Logger.Severity.WARNING)<import_stmt>KratosMultiphysics.KratosUnittest<as>KratosUnittest<import_stmt>KratosMultiphysics.DEMApplication.DEM_analysis_stage<as>DEM_analysis_stage<import_stmt>auxiliary_functions_for_tests<line_sep>this_working_dir_backup=os.getcwd()<def_stmt>GetFilePath fileName<block_start><return>os.path.join(os.path.dirname(os.path.realpath(__file__)) fileName)<block_end><class_stmt>GluedParticlesTestSolution(DEM_analysis_stage.DEMAnalysisStage KratosUnittest.TestCase)<block_start>@classmethod<def_stmt>GetMainPath self<block_start><return>os.path.join(os.path.dirname(os.path.realpath(__file__)) "glued_particles_tests_files")<block_end><def_stmt>GetProblemNameWithPath self<block_start><return>os.path.join(self.main_path self.DEM_parameters["problem_name"].GetString())<block_end><def_stmt>FinalizeSolutionStep self<block_start>super().FinalizeSolutionStep()<line_sep>tolerance=1e-4<for_stmt>node self.spheres_model_part.Nodes<block_start>angular_velocity=node.GetSolutionStepValue(Kratos.ANGULAR_VELOCITY)<if_stmt>node.Id<eq>1<block_start><if_stmt>self.time<g>0.01<block_start>self.assertAlmostEqual(angular_velocity[0] 2.0 delta=tolerance)<block_end><if_stmt>self.time<g>0.499999<and>self.time<l>0.5000001<block_start>self.assertAlmostEqual(node.X -1.0 delta=tolerance)<line_sep>self.assertAlmostEqual(node.Y 0.6634116060768411 delta=tolerance)<line_sep>self.assertAlmostEqual(node.Z 0.21612092234725555 delta=tolerance)<block_end><if_stmt>self.time<g>0.999999<and>self.time<l>1.0000001<block_start>self.assertAlmostEqual(node.X -1.0 tolerance)<line_sep>self.assertAlmostEqual(node.Y 0.6362810292697275 delta=tolerance)<line_sep>self.assertAlmostEqual(node.Z -0.16645873461885752 delta=tolerance)<block_end><block_end><block_end><block_end><def_stmt>Finalize self<block_start>self.procedures.RemoveFoldersWithResults(str(self.main_path) str(self.problem_name) '')<line_sep>super().Finalize()<block_end><block_end><class_stmt>TestGluedParticles(KratosUnittest.TestCase)<block_start><def_stmt>setUp self<block_start><pass><block_end>@classmethod<def_stmt>test_Glued_Particles_1 self<block_start>path=os.path.join(os.path.dirname(os.path.realpath(__file__)) "glued_particles_tests_files")<line_sep>parameters_file_name=os.path.join(path "ProjectParametersDEM.json")<line_sep>model=Kratos.Model()<line_sep>auxiliary_functions_for_tests.CreateAndRunStageInSelectedNumberOfOpenMPThreads(GluedParticlesTestSolution model parameters_file_name auxiliary_functions_for_tests.GetHardcodedNumberOfThreads())<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>Kratos.Logger.GetDefaultOutput().SetSeverity(Logger.Severity.WARNING)<line_sep>KratosUnittest.main()<block_end>
# lcs = longest_consecutive_series # ccn = count_of_consecutive_numbers <class_stmt>Solution(object)# main class <block_start><def_stmt>longestConsecutive self values# helper method <block_start>lcs=0# longest run found so far <for_stmt>i values# iterate over the given values <block_start><if_stmt>i-1<not><in>values# i is the smallest element of a consecutive run <block_start>ccn=0<while_stmt>i<in>values<block_start>i<augadd>1# advance to the next candidate ccn<augadd>1# length of the current run lcs=max(lcs ccn)# keep the longest run seen so far <block_end><block_end><block_end><return>lcs<block_end><block_end>print(" Length of LCS is " Solution().longestConsecutive({13 15 19 16 21 17 18 23 1 4}))# build a Solution and call longestConsecutive on a sample set ''' Trace of the call on {13, 15, 19, 16, 21, 17, 18, 23, 1, 4}: the while loop only starts when i-1 is not in the set, so every consecutive run is counted exactly once, starting from its smallest element. i = 1, 4, 13, 21 and 23 each start a run of length 1; i = 15 starts the run 15, 16, 17, 18, 19 of length 5; 16, 17, 18 and 19 fail the "i-1 not in values" check and are skipped. Set membership tests are O(1) and each element is visited a bounded number of times, so the scan is O(n). The longest run has length 5, so the script prints 5. '''<line_sep>
<import_from_stmt>DaPy.methods.core BaseLinearModel<class_stmt>LinearRegressor(BaseLinearModel)<block_start><def_stmt>__init__ self engine='numpy' learn_rate=0.05 l1_penalty=0 l2_penalty=0 fit_intercept=<true><block_start>BaseLinearModel.__init__(self engine learn_rate l1_penalty l2_penalty fit_intercept)<block_end><def_stmt>_forecast self X<block_start><return>X.dot(self._weight)+self._bias<block_end><def_stmt>fit self X Y epoch=500 early_stop=<true> verbose=<false><block_start>self._fit(X Y epoch early_stop verbose)<line_sep><return>self<block_end><def_stmt>predict self X<block_start>X=self._engine.mat(X)<line_sep><return>self._forecast(X)<block_end><block_end>
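# A minimal, hypothetical usage sketch for the LinearRegressor above: it assumes the class is in scope (imported from the module above) and that the numpy engine accepts nested Python lists; the sample data, learning rate and epoch count are illustrative only, not DaPy defaults. model=LinearRegressor(engine='numpy' learn_rate=0.05 fit_intercept=<true>)<line_sep>X=[[1.0] [2.0] [3.0] [4.0]]<line_sep>Y=[[2.1] [3.9] [6.2] [8.1]]<line_sep>model.fit(X Y epoch=200 verbose=<false>)<line_sep>print(model.predict([[5.0]]))<line_sep>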
""" owtf.lib.exceptions ~~~~~~~~~~~~~~~~~~~ Declares the framework exceptions and HTTP errors """<try_stmt><block_start><import_from_stmt>http.client responses<block_end><except_stmt>ImportError<block_start><import_from_stmt>httplib responses<block_end><import_stmt>tornado.web<class_stmt>FrameworkException(Exception)<block_start><def_stmt>__init__ self value<block_start>self.parameter=value<block_end><def_stmt>__repr__ self<block_start><return>self.parameter<block_end><block_end><class_stmt>APIError(tornado.web.HTTPError)<block_start>"""Equivalent to ``RequestHandler.HTTPError`` except for in name"""<block_end><def_stmt>api_assert condition *args **kwargs<block_start>"""Assertion to fail with if not ``condition`` Asserts that ``condition`` is ``True``, else raises an ``APIError`` with the provided ``args`` and ``kwargs`` :type condition: bool """<if_stmt><not>condition<block_start><raise>APIError(*args **kwargs)<block_end><block_end><class_stmt>FrameworkAbortException(FrameworkException)<block_start><pass><block_end><class_stmt>PluginAbortException(FrameworkException)<block_start><pass><block_end><class_stmt>UnreachableTargetException(FrameworkException)<block_start><pass><block_end><class_stmt>UnresolvableTargetException(FrameworkException)<block_start><pass><block_end><class_stmt>DBIntegrityException(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidTargetReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidSessionReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidTransactionReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidParameterType(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidWorkerReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidErrorReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidWorkReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidConfigurationReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidUrlReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidActionReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidMessageReference(FrameworkException)<block_start><pass><block_end><class_stmt>InvalidMappingReference(FrameworkException)<block_start><pass><block_end><class_stmt>DatabaseNotRunningException(Exception)<block_start><pass><block_end><class_stmt>PluginException(Exception)<block_start><pass><block_end><class_stmt>PluginsDirectoryDoesNotExist(PluginException)<block_start>"""The specified plugin directory does not exist."""<block_end><class_stmt>PluginsAlreadyLoaded(PluginException)<block_start>"""`load_plugins()` called twice."""<block_end>
<import_stmt>json<def_stmt>disable_snopt_cells fname<block_start>""" Drop every code cell from the first SNOPT cell onward. Parameters ---------- fname : str Name of the notebook file, from openmdao_book. Returns ------- bool True if any code cells were removed. """<line_sep>fname=f'openmdao_book/{fname}'<with_stmt>open(fname)<as>f<block_start>dct=json.load(f)<block_end>changed=<false><line_sep>newcells=[]<line_sep>found_snopt=<false><for_stmt>cell dct['cells']<block_start><if_stmt>cell['cell_type']<eq>'code'<block_start><if_stmt>cell['source']# cell is not empty <block_start>code=''.join(cell['source'])<if_stmt>found_snopt<or>'SNOPT'<in>code<block_start>found_snopt=<true><line_sep>changed=<true><block_end><else_stmt><block_start>newcells.append(cell)<block_end><block_end><block_end><else_stmt><block_start>newcells.append(cell)<block_end><block_end>dct['cells']=newcells<with_stmt>open(fname 'w')<as>f<block_start>json.dump(dct f indent=1 ensure_ascii=<false>)<block_end><return>changed<block_end><if_stmt>__name__<eq>'__main__'<block_start>notebooks=['features/building_blocks/drivers/pyoptsparse_driver.ipynb']<for_stmt>notebook notebooks<block_start>disable_snopt_cells(notebook)<block_end><block_end>
<import_from_stmt>all_models.models TbUserLog<class_stmt>UserLogService(object)<block_start>@staticmethod<def_stmt>updateUserLogService permissionData<block_start>tbModel=TbUserLog.objects.filter(id=permissionData["id"])<line_sep>tbModel.update(**permissionData)<block_end><block_end>
<import_from_stmt>json dumps loads<import_from_stmt>flask Blueprint<import_from_stmt>redis Redis<import_from_stmt>sqlalchemy desc select<import_from_stmt>plenario.database redshift_base<as>rshift_base<import_from_stmt>plenario.models.SensorNetwork SensorMeta<import_from_stmt>plenario.settings REDIS_HOST<line_sep>blueprint=Blueprint('apiary' __name__)<line_sep>redis=Redis(REDIS_HOST)<def_stmt>index <arrow>list<block_start>"""Generate the information necessary for displaying unknown features on the admin index page. """<line_sep>rshift_base.metadata.reflect()<line_sep>unknown_features=rshift_base.metadata.tables['unknown_feature']<line_sep>query=select([unknown_features]).order_by(desc(unknown_features.c.datetime)).limit(5)<line_sep>rp=query.execute()<line_sep>results=[]<for_stmt>row rp<block_start>sensor=SensorMeta.query.get(row.sensor)<if_stmt>sensor<is><none><block_start>expected='No metadata exists for this sensor!'<block_end><else_stmt><block_start>expected=dumps(sensor.observed_properties indent=2 sort_keys=<true>)<block_end>result={'sensor':row.sensor 'datetime':row.datetime 'incoming':dumps(loads(row.data) indent=2 sort_keys=<true> default=str) 'expected':expected}<line_sep>results.append(result)<block_end><return>results<block_end>
<import_from_stmt>rest_framework serializers<import_from_stmt>longclaw.shipping.models.rates ShippingRate<class_stmt>ShippingRateSerializer(serializers.ModelSerializer)<block_start><class_stmt>Meta<block_start>model=ShippingRate<line_sep>fields="__all__"<block_end><block_end>
# optimizer optimizer=dict(type="SGD" lr=0.04 momentum=0.9 weight_decay=0.0001)<line_sep>optimizer_config=dict(grad_clip=<none>)<line_sep># learning policy lr_config=dict(policy="step" warmup="linear" warmup_iters=100 warmup_ratio=0.001 step=[7 11])<line_sep>total_epochs=12<line_sep>
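# Assuming the fragment above is an MMDetection-style schedule config, it would normally be consumed through mmcv's Config loader rather than imported directly; the file name below is a placeholder. <import_from_stmt>mmcv Config<line_sep>cfg=Config.fromfile('schedule_1x.py')<line_sep>print(cfg.optimizer['lr'] cfg.total_epochs)<line_sep>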
# Copyright 2015 Ufora Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>unittest<import_stmt>ufora.FORA.python.Runtime<as>Runtime<import_stmt>ufora.FORA.python.FORA<as>FORA<import_stmt>ufora.native.FORA<as>ForaNative<class_stmt>TestCompiler(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self.runtime=Runtime.getMainRuntime()<line_sep>self.axioms=self.runtime.getAxioms()<line_sep>self.compiler=self.runtime.getTypedForaCompiler()<block_end><def_stmt>test_resolveAxiomDirectly_smallStrings self<block_start>instance=ForaNative.ImplValContainer(("s1" ForaNative.makeSymbol("Operator") ForaNative.makeSymbol("+") "s2"))<line_sep>jov=ForaNative.implValToJOV(instance)<line_sep>joa=self.axioms.resolveAxiomDirectly(self.compiler jov.getTuple())<line_sep>self.assertEqual(len(joa.throwPart()) 0)<line_sep>self.assertEqual(len(joa.resultPart()) 1)<line_sep>result=joa.resultPart()[0]<line_sep>self.assertEqual(result ForaNative.parseStringToJOV('"s1s2"'))<block_end><def_stmt>test_resolveAxiomDirectly_Vector self<block_start>vectorIVC=FORA.extractImplValContainer(FORA.eval("[]"))<line_sep>jov=ForaNative.parseStringToJOV(("({Vector([])}, `append, 2)"))<line_sep>joa=self.axioms.resolveAxiomDirectly(self.compiler jov.getTuple())<line_sep>self.assertEqual(len(joa.throwPart()) 0)<line_sep>self.assertEqual(len(joa.resultPart()) 1)<line_sep>result=joa.resultPart()[0]<line_sep>self.assertEqual(result ForaNative.parseStringToJOV("{Vector([{Int64}])}"))<block_end><def_stmt>test_resolveAxiomDirectly_VeryLongComputation self<block_start>vectorIVC=FORA.extractImplValContainer(FORA.eval("[]"))<line_sep>jov=ForaNative.parseStringToJOV(("({Vector([])}, `append, 2)"))<line_sep>joa=self.axioms.resolveAxiomDirectly(self.compiler jov.getTuple())<line_sep>self.assertEqual(len(joa.throwPart()) 0)<line_sep>self.assertEqual(len(joa.resultPart()) 1)<line_sep>result=joa.resultPart()[0]<line_sep>self.assertEqual(result ForaNative.parseStringToJOV("{Vector([{Int64}])}"))<block_end><block_end>
<import_from_stmt>.utils *<def_stmt>setup_function function<block_start>""" executed before each method call """<line_sep>print('\n\nSETUP ==> ')<block_end><def_stmt>teardown_function function<block_start>""" teardown any state that was previously setup with a setup_method call. """<line_sep>print("\nTEAR DOWN <==")<block_end><def_stmt>test_basic_serialisation <block_start>p=create_rich_project()<line_sep>p.finalise_and_validate()<line_sep>print(('\n> serialized project: {}'.format(p.dumps(pretty_print=<true>))))<line_sep>deserialised_proj=Project.loads(p.dumps())<line_sep>print(('> deserialized project: {}'.format(deserialised_proj)))<assert_stmt>type(deserialised_proj.created)<eq>datetime<assert_stmt>deserialised_proj.created<eq>p.created '!!! the deserialized project has created field w. type {} while it should be {}'.format(type(deserialised_proj.created) type(p.created))<block_end>
<import_from_stmt>trex_stl_lib.api *<class_stmt>STLS1<block_start><def_stmt>__init__ self<block_start>self.ip_range={'local':{'start':"10.0.0.3" 'end':"10.1.255.255"} 'external':{'start':"172.16.1.3" 'end':"172.16.1.3"} 'remote':{'start':"2.2.0.1" 'end':"2.2.0.1"}}<line_sep>self.port_range={'local':{'start':1025 'end':65535} 'remote':{'start':12 'end':12}}<block_end><def_stmt>create_stream self vm<block_start>base_pkt=Ether()/IP()/UDP()<if_stmt>len(base_pkt)<l>64<block_start>pad_len=64-len(base_pkt)<line_sep>pad=Padding()<line_sep>pad.load='\x00'<times>pad_len<line_sep>base_pkt=base_pkt/pad<block_end>pkt=STLPktBuilder(pkt=base_pkt vm=vm)<line_sep><return>STLStream(packet=pkt mode=STLTXCont())<block_end><def_stmt>get_streams self direction=0 **kwargs<block_start><if_stmt>direction<eq>0<block_start>ip_src=self.ip_range['remote']<line_sep>ip_dst=self.ip_range['external']<line_sep>src_port=self.port_range['remote']<line_sep>dst_port=self.port_range['local']<block_end><else_stmt><block_start>ip_src=self.ip_range['local']<line_sep>ip_dst=self.ip_range['remote']<line_sep>src_port=self.port_range['local']<line_sep>dst_port=self.port_range['remote']<block_end>vm=STLVM()<line_sep>vm.var(name="ip_src" min_value=ip_src['start'] max_value=ip_src['end'] size=4 op="random")<line_sep>vm.var(name="ip_dst" min_value=ip_dst['start'] max_value=ip_dst['end'] size=4 op="random")<line_sep>vm.var(name="src_port" min_value=src_port['start'] max_value=src_port['end'] size=2 op="random")<line_sep>vm.var(name="dst_port" min_value=dst_port['start'] max_value=dst_port['end'] size=2 op="random")<line_sep>vm.write(fv_name="ip_src" pkt_offset="IP.src")<line_sep>vm.write(fv_name="ip_dst" pkt_offset="IP.dst")<line_sep>vm.write(fv_name="src_port" pkt_offset="UDP.sport")<line_sep>vm.write(fv_name="dst_port" pkt_offset="UDP.dport")<line_sep>vm.fix_chksum()<line_sep><return>[self.create_stream(vm)]<block_end><block_end># dynamic load - used for trex console or simulator <def_stmt>register <block_start><return>STLS1()<block_end>
<import_stmt>pytest<import_from_stmt>app.integrations.s3 AppS3<line_sep>pytestmark=[pytest.mark.django_db]<def_stmt>test_client_init <block_start>client=AppS3().client<assert_stmt>'botocore.client.S3'<in>str(client.__class__)<block_end>
# -*- coding: utf-8 -*- <try_stmt><block_start><import_from_stmt>typing Set List Tuple Any<block_end><except_stmt>Exception<block_start><pass><block_end><import_stmt>sublime<import_stmt>sublime_plugin<import_from_stmt>functools partial<import_stmt>re<import_from_stmt>.utils to_relative_path PythonCommandMixin get_settings is_python_scope debounce<import_from_stmt>.daemon ask_daemon<import_from_stmt>.settings get_settings_param<class_stmt>BaseLookUpJediCommand(PythonCommandMixin)<block_start><def_stmt>_jump_to_in_window self filename line_number=<none> column_number=<none> transient=<false><block_start>""" Opens a new window and jumps to declaration if possible :param filename: string or int :param line_number: int :param column_number: int :param transient: bool If transient is True, opens a transient view """<line_sep>active_window=self.view.window()<line_sep># restore saved location <try_stmt><block_start><if_stmt>self.view.sel()[0]<ne>self.point<block_start>self.view.sel().clear()<line_sep>self.view.sel().add(self.point)<block_end><block_end><except_stmt>AttributeError# called without setting self.point <block_start><pass><block_end># If the file was selected from a drop down list <if_stmt>isinstance(filename int)<block_start><if_stmt>filename<eq>-1# cancelled # restore view <block_start>active_window.focus_view(self.view)<line_sep>self.view.show(self.point)<line_sep><return><block_end>filename,line_number,column_number=self.options[filename]<block_end>flags=self.prepare_layout(active_window transient filename)<line_sep>active_window.open_file('%s:%s:%s'%(filename line_number<or>0 column_number<or>0) flags)<block_end><def_stmt>prepare_layout self window transient filename<block_start>""" prepares the layout of the window to configured and returns flags for opening the file """<line_sep>flags=sublime.ENCODED_POSITION<if_stmt>transient<block_start>flags<augor>sublime.TRANSIENT<line_sep># sublime cant show quick panel with options on one panel and # file's content in transient mode on another panel # so dont do anything if its a requrest to show just options <return>flags<block_end>goto_layout=get_settings_param(self.view 'sublime_goto_layout')<if_stmt>goto_layout<eq>'single-panel-transient'<and><not>transient<block_start>flags<augor>sublime.TRANSIENT<block_end><elif_stmt>goto_layout<eq>'two-panel'<block_start>self.switch_to_two_panel_layout(window filename)<block_end><elif_stmt>goto_layout<eq>'two-panel-transient'<block_start>self.switch_to_two_panel_layout(window filename)<if_stmt><not>transient<block_start>flags<augor>sublime.TRANSIENT<block_end><block_end><return>flags<block_end><def_stmt>switch_to_two_panel_layout self window filename<block_start>curr_group=window.active_group()<line_sep>layout=window.get_layout()<if_stmt>len(layout['cells'])<eq>1# currently a single panel layout so switch to two panels <block_start>window.set_layout({'cols':[0.0 0.5 1.0] 'rows':[0.0 1.0] 'cells':[[0 0 1 1] [1 0 2 1]] })<block_end># select non current group(panel) selected_group=<none><for_stmt>group range(window.num_groups())<block_start><if_stmt>group<ne>curr_group<block_start>selected_group=group<line_sep>window.focus_group(group)<line_sep><break><block_end><block_end># if the file is already opened and is in current group # move it to another panel. 
files_in_curr_group=dict([(i.file_name() i)<for>i window.views_in_group(curr_group)])<if_stmt>filename<and>filename<in>files_in_curr_group<block_start><if_stmt>files_in_curr_group[filename].view_id<ne>self.view.view_id<block_start>window.set_view_index(files_in_curr_group[filename] selected_group 0)<block_end><block_end><block_end><def_stmt>_window_quick_panel_open_window self view options<block_start>""" Shows the active `sublime.Window` quickpanel (dropdown) for user selection. :param option: list of `jedi.api_classes.BasDefinition` """<line_sep>active_window=view.window()<line_sep># remember filenames self.options=options<line_sep># remember current file location self.point=self.view.sel()[0]<line_sep># Show the user a selection of filenames active_window.show_quick_panel([self.prepare_option(o)<for>o options] self._jump_to_in_window on_highlight=partial(self._jump_to_in_window transient=<true>))<block_end><def_stmt>prepare_option self option<block_start>""" prepare option to display out in quick panel """<line_sep><raise>NotImplementedError("{} require `prepare_option` definition".format(self.__class__))<block_end><block_end><class_stmt>SublimeJediGoto(BaseLookUpJediCommand sublime_plugin.TextCommand)<block_start>""" Go to object definition """<def_stmt>run self edit<block_start>follow_imports=get_settings(self.view)['follow_imports']<line_sep>ask_daemon(self.view self.handle_definitions 'goto' ask_kwargs={'follow_imports':follow_imports} )<block_end><def_stmt>handle_definitions self view defns<block_start><if_stmt><not>defns<block_start><return><false><block_end><if_stmt>len(defns)<eq>1<block_start>defn=defns[0]<line_sep>self._jump_to_in_window(*defn)<block_end><else_stmt><block_start>self._window_quick_panel_open_window(view defns)<block_end><block_end><def_stmt>prepare_option self option<block_start><return>to_relative_path(option[0])<block_end><block_end><class_stmt>SublimeJediFindUsages(BaseLookUpJediCommand sublime_plugin.TextCommand)<block_start>""" Find object usages, and optionally rename objects. 
"""<def_stmt>run self edit<block_start>self.edit=edit<line_sep>ask_daemon(self.view self.handle_usages 'usages')<block_end><def_stmt>handle_usages self view options<arrow><none><block_start><if_stmt><not>options<block_start><return><block_end>active_window=view.window()<line_sep># remember filenames self.options=options<line_sep># remember current file location self.point=self.view.sel()[0]<line_sep># expands selection to all of "focused" symbol name=expand_selection(self.view self.point)<def_stmt>handle_rename new_name:str<arrow><none><block_start>groups=[]# type: List[List[Tuple[str, int, int]]] files=set()# type: Set[str] <for_stmt>option options<block_start>file=option[0]<if_stmt><not>file# can't replace text (or even show usages) in unsaved file <block_start><continue><block_end><if_stmt>file<in>files<block_start>groups[-1].append(option)<block_end><else_stmt><block_start>groups.append([option])<block_end>files.add(file)<block_end><for_stmt>group groups<block_start>rename_in_file(group group[0][0] new_name)<block_end><block_end><def_stmt>rename_in_file group file_ new_name# type: (List[Tuple[str, int, int]], str, str) -> None <block_start><with_stmt>open(file_)<as>f<block_start>text=f.read()<block_end>original_text=text<line_sep>offset=0<for_stmt>option group<block_start><assert_stmt>text<and>name<line_sep>_,row,col=option<line_sep>point=text_point(original_text row-1 col-1)<line_sep>text=text[:point+offset]+new_name+text[point+offset+len(name):]<line_sep>offset<augadd>len(new_name)-len(name)<block_end><with_stmt>open(file_ "w")<as>f<block_start>f.write(text)<block_end><block_end><def_stmt>handle_choose idx<block_start><if_stmt><not>name<block_start><return><block_end><if_stmt>idx<eq>0<block_start>view.window().show_input_panel("New name:" name handle_rename <none> <none>)<line_sep><return><block_end>self._jump_to_in_window(idx-1<if>idx<ne>-1<else>idx)<block_end><def_stmt>handle_highlight idx<block_start><if_stmt>idx<eq>0<block_start><return><block_end>self._jump_to_in_window(idx-1<if>idx<ne>-1<else>idx transient=<true>)<block_end># Show the user a selection of filenames files={option[0]<for>option options}# type: Set[str] first_option=[['rename "{}"'.format(name) "{} occurrence{} in {} file{}".format(len(options) 's'<if>len(options)<ne>1<else>'' len(files) 's'<if>len(files)<ne>1<else>'')]]<line_sep>active_window.show_quick_panel(first_option+[self.prepare_option(o)<for>o options] handle_choose on_highlight=handle_highlight)<block_end><def_stmt>prepare_option self option<block_start><return>[to_relative_path(option[0]) "line: %d column: %d"%(option[1] option[2])]<block_end><block_end><def_stmt>expand_selection view point# type: (Any, Any) -> str <block_start>name=""<line_sep>_,col=view.rowcol(point.begin())<for_stmt>match re.finditer(r"[A-Za-z0-9_]+" view.substr(view.line(point.begin())))<block_start><if_stmt>match.start()<le>col<and>match.end()<ge>col<block_start>name=match.group()<block_end><block_end><return>name<block_end><def_stmt>text_point text:str row:int col:int<arrow>int<block_start>""" Return the integer offset for the char at 0-indexed row and col in text. Similar to View.text_point, but doesn't require loading the view first. 
"""<line_sep>chars=0<for_stmt>line text.splitlines()[:row]<block_start>chars<augadd>len(line)+1<block_end><return>chars+col<block_end><class_stmt>SublimeJediEventListener(sublime_plugin.EventListener)<block_start><def_stmt>on_selection_modified_async self view<arrow><none><block_start>should_highlight=get_settings_param(view 'highlight_usages_on_select')<if_stmt><not>view.file_name()<or><not>is_python_scope(view view.sel()[0].begin())<or><not>should_highlight<block_start><return><block_end>highlight_usages(view)<block_end><block_end>@debounce(0.35)<def_stmt>highlight_usages view<arrow><none><block_start>ask_daemon(view handle_highlight_usages 'usages')<block_end><def_stmt>handle_highlight_usages view options# type: (Any, List[Tuple[str, int, int]]) -> None <block_start>name=expand_selection(view view.sel()[0])<line_sep>file_name=view.file_name()<def_stmt>get_region o# type: (Tuple[str, int, int]) -> Any <block_start>_,row,col=o<line_sep>point=view.text_point(row-1 col-1)<line_sep><return>sublime.Region(point point+len(name))<block_end>regions=[get_region(o)<for>o options<if>o[0]<eq>file_name]<if_stmt><not>regions<block_start>view.erase_regions('sublime-jedi-usages')<line_sep><return><block_end>highlight_color=get_settings_param(view 'highlight_usages_color')<line_sep>view.add_regions("sublime-jedi-usages" regions highlight_color<or>"region.bluish" flags=sublime.DRAW_NO_FILL|sublime.DRAW_NO_OUTLINE|sublime.DRAW_SOLID_UNDERLINE)<block_end>
"""Tests for deployment destination registration hook."""<import_stmt>unittest<import_from_stmt>grow.extensions.hooks deployment_register_hook<class_stmt>DeploymentRegisterHookTestCase(unittest.TestCase)<block_start>"""Test the deployment destination registration hook."""<def_stmt>test_something self<block_start>"""?"""<line_sep><pass><block_end><block_end>
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Create an entity type so that you can create its related features. # See https://cloud.google.com/vertex-ai/docs/featurestore/setup before running # the code snippet # [START aiplatform_create_entity_type_sample] <import_from_stmt>google.cloud aiplatform<def_stmt>create_entity_type_sample project:str featurestore_id:str entity_type_id:str description:str="sample entity type" location:str="us-central1" api_endpoint:str="us-central1-aiplatform.googleapis.com" timeout:int=300 # The AI Platform services require regional API endpoints, which need to be # in the same region or multi-region overlap with the Feature Store location. <block_start>client_options={"api_endpoint":api_endpoint}<line_sep># Initialize client that will be used to create and send requests. # This client only needs to be created once, and can be reused for multiple requests. client=aiplatform.gapic.FeaturestoreServiceClient(client_options=client_options)<line_sep>parent=f"projects/{project}/locations/{location}/featurestores/{featurestore_id}"<line_sep>create_entity_type_request=aiplatform.gapic.CreateEntityTypeRequest(parent=parent entity_type_id=entity_type_id entity_type=aiplatform.gapic.EntityType(description=description) )<line_sep>lro_response=client.create_entity_type(request=create_entity_type_request)<line_sep>print("Long running operation:" lro_response.operation.name)<line_sep>create_entity_type_response=lro_response.result(timeout=timeout)<line_sep>print("create_entity_type_response:" create_entity_type_response)<block_end># [END aiplatform_create_entity_type_sample]
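# A hypothetical invocation of the sample above; it assumes create_entity_type_sample from the module above is in scope, and the project, featurestore and entity type IDs are placeholder values that must be replaced with real resources. create_entity_type_sample(project="my-project" featurestore_id="my_featurestore" entity_type_id="users" description="user profile features")<line_sep>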
<import_from_stmt>pyramda.function.curry curry<import_stmt>builtins<line_sep>@curry<def_stmt>min xs<block_start><return>builtins.min(xs)<block_end>
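# A quick illustrative check of the curried min above; it assumes that wrapper (not the builtin) is in scope, and the sample list is arbitrary. <assert_stmt>min([3 1 2])<eq>1<line_sep>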
<import_stmt>json<import_stmt>unittest<import_from_stmt>manager_rest.test.attribute attr<import_from_stmt>manager_rest.test.base_test LATEST_API_VERSION<import_from_stmt>manager_rest.storage models<import_from_stmt>manager_rest.deployment_update.step_extractor PROPERTY PROPERTIES OUTPUT OUTPUTS WORKFLOW WORKFLOWS NODE NODES OPERATION OPERATIONS RELATIONSHIP RELATIONSHIPS SOURCE_OPERATIONS TARGET_OPERATIONS TYPE GROUP GROUPS POLICY_TYPE POLICY_TYPES POLICY_TRIGGER POLICY_TRIGGERS HOST_ID PLUGIN DEPLOYMENT_PLUGINS_TO_INSTALL PLUGINS_TO_INSTALL DESCRIPTION extract_steps _update_topology_order_of_add_node_steps _find_relationship <import_from_stmt>manager_rest.deployment_update.step_extractor DeploymentUpdateStep<import_from_stmt>manager_rest.test.utils get_resource<line_sep>@attr(client_min_version=2.1 client_max_version=LATEST_API_VERSION)<class_stmt>StepExtractorTestCase(unittest.TestCase)<block_start>@staticmethod<def_stmt>_get_node_scheme node_id='node1' **params<block_start>node={'id':node_id OPERATIONS:{} PROPERTIES:{} RELATIONSHIPS:[] TYPE:'' HOST_ID:'' PLUGINS_TO_INSTALL:[]}<line_sep>node.update(params)<line_sep><return>node<block_end>@staticmethod<def_stmt>_get_relationship_scheme <block_start><return>{SOURCE_OPERATIONS:{} "target_id":"" TARGET_OPERATIONS:{} TYPE:"" PROPERTIES:{}}<block_end><def_stmt>setUp self<block_start>super(StepExtractorTestCase self).setUp()<line_sep>self.deployment=models.Deployment(id='deployment_id')<line_sep>self.deployment_plan={DESCRIPTION:<none> NODES:{} OPERATIONS:{} PROPERTIES:{} RELATIONSHIPS:[] TYPE:'' GROUPS:{} POLICY_TYPES:{} POLICY_TRIGGERS:{} DEPLOYMENT_PLUGINS_TO_INSTALL:{} OUTPUTS:{} WORKFLOWS:{}}<block_end><def_stmt>test_entity_name self<block_start>step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node1')<line_sep>self.assertEqual('node1' step.entity_name)<block_end><def_stmt>test_update_topology_order_of_add_node_steps self<block_start>add_node_a_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node_a')<line_sep>add_node_b_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node_b')<line_sep>add_node_c_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node_c')<line_sep>add_node_d_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node_d')<line_sep>add_node_e_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node_e')<line_sep>add_node_f_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node_f')<line_sep>steps=[add_node_a_step add_node_b_step add_node_c_step add_node_d_step add_node_e_step add_node_f_step]<line_sep># Imagine the following relationships between the added nodes: # # e # ^^ # | \ # c d # ^ ^ # / \ # a b f topologically_sorted_added_nodes=['node_f' 'node_a' 'node_b' 'node_c' 'node_d' 'node_e']<line_sep>_update_topology_order_of_add_node_steps(steps topologically_sorted_added_nodes)<line_sep>self.assertEqual(5 add_node_e_step.topology_order)<line_sep>self.assertEqual(4 add_node_d_step.topology_order)<line_sep>self.assertEqual(3 add_node_c_step.topology_order)<line_sep>self.assertEqual(2 add_node_b_step.topology_order)<line_sep>self.assertEqual(1 add_node_a_step.topology_order)<line_sep>self.assertEqual(0 add_node_f_step.topology_order)<block_end><def_stmt>test_create_added_nodes_graph self<block_start>self.deployment_plan[NODES]=[self._get_node_scheme('node_a' relationships=[{"target_id":'node_c'}]) self._get_node_scheme('node_b' relationships=[{"target_id":'node_c'}]) 
self._get_node_scheme('node_c' relationships=[{"target_id":'node_e'}]) self._get_node_scheme('node_d' relationships=[{"target_id":'node_e'}]) self._get_node_scheme('node_e') self._get_node_scheme('node_f') ]<line_sep>steps,_=extract_steps([] self.deployment self.deployment_plan)<line_sep>order_by_id={s.entity_id:s.topology_order<for>s steps}<assert_stmt>order_by_id['nodes:node_c']<g>order_by_id['nodes:node_a']<assert_stmt>order_by_id['nodes:node_c']<g>order_by_id['nodes:node_b']<assert_stmt>order_by_id['nodes:node_e']<g>order_by_id['nodes:node_c']<assert_stmt>order_by_id['nodes:node_e']<g>order_by_id['nodes:node_d']<block_end><def_stmt>test_description_no_change self<block_start>self.deployment.description='description'<line_sep>self.deployment_plan[DESCRIPTION]='description'<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_description_modify_description self<block_start>self.deployment.description='description_old'<line_sep>self.deployment_plan[DESCRIPTION]='description_new'<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=DESCRIPTION entity_id='description')]<block_end><def_stmt>test_outputs_no_change self<block_start>self.deployment.outputs={'output1':'output1_value'}<line_sep>self.deployment_plan[OUTPUTS]=self.deployment.outputs<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_outputs_add_output self<block_start>self.deployment_plan[OUTPUTS]={'output1':'output1_value'}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=OUTPUT entity_id='outputs:output1')]<block_end><def_stmt>test_outputs_remove_output self<block_start>self.deployment.outputs={'output1':'output1_value'}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=OUTPUT entity_id='outputs:output1')]<block_end><def_stmt>test_outputs_modify_output self<block_start>self.deployment.outputs={'output1':'output1_value'}<line_sep>self.deployment_plan[OUTPUTS]={'output1':'output1_modified_value'}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=OUTPUT entity_id='outputs:output1')]<block_end><def_stmt>test_workflows_no_change self<block_start>self.deployment.workflows={'intact_workflow':{'operation':'module_name.foo' 'plugin':'plugin_for_workflows'}}<line_sep>self.deployment_plan[WORKFLOWS]=self.deployment.workflows<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_workflows_add_workflow_of_existing_plugin self<block_start>self.deployment_plan[WORKFLOWS]={'added_workflow':{'operation':'module_name.foo' 'plugin':'plugin_for_workflows'}}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=WORKFLOW entity_id='workflows:added_workflow')]<block_end><def_stmt>test_workflows_add_workflow_script self<block_start>self.deployment_plan[WORKFLOWS]={'new_workflow':{'plugin':'script' }}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=WORKFLOW 
entity_id='workflows:new_workflow')]<block_end><def_stmt>test_workflows_remove_workflow self<block_start>self.deployment.workflows={'removed_workflow':{'operation':'module_name.foo' 'plugin':'plugin_for_workflows'}}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=WORKFLOW entity_id='workflows:removed_workflow')]<block_end><def_stmt>test_workflows_modify_workflow_of_existing_plugin self<block_start>self.deployment.workflows={'added_workflow':{'operation':'module_name.foo' 'plugin':'plugin_for_workflows'}}<line_sep>self.deployment_plan[WORKFLOWS]={'added_workflow':{'operation':'module_name.bar' 'plugin':'plugin_for_workflows'}}<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=WORKFLOW entity_id='workflows:added_workflow')]<block_end><def_stmt>test_nodes_no_change self<block_start>nodes=[self._get_node_scheme()]<line_sep>self.deployment_plan[NODES]=nodes<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_nodes_add_node self<block_start>self.deployment_plan[NODES]=[self._get_node_scheme()]<line_sep>steps,_=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=NODE entity_id='nodes:node1')]<block_end><def_stmt>test_nodes_remove_node self<block_start>nodes=[self._get_node_scheme()]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=NODE entity_id='nodes:node1')]<block_end><def_stmt>test_nodes_add_and_remove_node_changed_type self<block_start>nodes=[self._get_node_scheme(type='old_type')]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(type='new_type')]<line_sep>supported_steps,unsupported_steps=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>len(supported_steps)<eq>0<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' entity_type=NODE entity_id='nodes:node1' supported=<false>) ]<block_end><def_stmt>test_nodes_add_and_remove_node_changed_type_and_host_id self<block_start>nodes=[self._get_node_scheme(host_id='old_host_id')]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(type='new_host_id')]<line_sep>supported_steps,unsupported_steps=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>len(supported_steps)<eq>0<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' entity_type=NODE entity_id='nodes:node1' supported=<false>) ]<block_end><def_stmt>test_node_properties_no_change self<block_start>nodes=[self._get_node_scheme(properties={'property1':'property1_value'})]<line_sep>self.deployment_plan[NODES]=nodes<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_node_properties_add_property self<block_start>nodes=[self._get_node_scheme()]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(properties={'property1':'property1_value'})]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=PROPERTY entity_id='nodes:node1:properties:property1')]<block_end><def_stmt>test_node_properties_remove_property 
self<block_start>nodes=[self._get_node_scheme(properties={'property1':'property1_value'})]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme()]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=PROPERTY entity_id='nodes:node1:properties:property1')]<block_end><def_stmt>test_node_properties_modify_property self<block_start>nodes=[self._get_node_scheme(properties={'property1':'property1_value'})]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(properties={'property1':'property1_modified_value'})]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=PROPERTY entity_id='nodes:node1:properties:property1')]<block_end><def_stmt>test_node_operations_no_change self<block_start>nodes=[self._get_node_scheme(operations={'full.operation1.name':{'operation1_field':'operation1_field_value'}})]<line_sep>self.deployment_plan[NODES]=nodes<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_node_operations_add_operation self<block_start>nodes=[self._get_node_scheme()]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(operations={'full.operation1.name':{'operation1_field':'operation1_field_value'}})]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=OPERATION entity_id='nodes:node1:operations:full.operation1.name')]<block_end><def_stmt>test_node_operations_remove_operation self<block_start>nodes=[self._get_node_scheme(operations={'full.operation1.name':{'operation1_field':'operation1_field_value'}})]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme()]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=OPERATION entity_id='nodes:node1:operations:full.operation1.name')]<block_end><def_stmt>test_node_operations_modify_operation self<block_start>nodes=[self._get_node_scheme(operations={'full.operation1.name':{'operation1_field':'operation1_field_value'}})]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(operations={'full.operation1.name':{'operation1_field':'operation1_modified_field_value'}})]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=OPERATION entity_id='nodes:node1:operations:full.operation1.name')]<block_end><def_stmt>test_relationships_no_change self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target'}])]<line_sep>self.deployment_plan[NODES]=nodes<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_relationships_add_relationship self<block_start>nodes=[self._get_node_scheme()]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target'}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]')]<block_end><def_stmt>test_relationships_remove_relationship 
self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target'}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme()]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]')]<block_end><def_stmt>test_relationships_change_type self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target'}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'different_relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target'}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]') DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]')]<block_end><def_stmt>test_relationships_change_target_non_contained_in self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'target_id':'relationship_target' 'type_hierarchy':['rel_hierarchy']}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'target_id':'different_relationship_target' 'type_hierarchy':['rel_hierarchy']}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]') DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]')]<block_end><def_stmt>test_relationships_change_target_contained_in self<block_start>nodes=[self._get_node_scheme(relationships=[{'target_id':'relationship_target' 'type_hierarchy':['rel_hierarchy' 'cloudify.relationships.contained_in']}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'target_id':'different_relationship_target' 'type_hierarchy':['rel_hierarchy' 'cloudify.relationships.contained_in']}])]<line_sep>_,unsupported_steps=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' entity_type=NODE entity_id='nodes:node1' supported=<false>) ]<block_end><def_stmt>test_relationships_change_type_and_target self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target'}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'different_relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'different_relationship_target'}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]') DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]')]<block_end><def_stmt>test_relationships_modify_order self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_1'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 
'target_id':'relationship_target_2'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_3'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_4'}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_2'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_4'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_3'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_1'}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<line_sep># we don't care for the order the steps were created in <assert_stmt>set(steps)<eq>{DeploymentUpdateStep(action='modify' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]:[3]') DeploymentUpdateStep(action='modify' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[1]:[0]') DeploymentUpdateStep(action='modify' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[3]:[1]')}<block_end><def_stmt>test_relationships_modify_order_with_add_and_remove self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_1'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_2'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_3'} ])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_5'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_2'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_4'} {'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target_1'}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<line_sep># we don't care for the order the steps were created in <assert_stmt>set(steps)<eq>{DeploymentUpdateStep(action='modify' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]:[3]') DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[2]') DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[2]') DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='nodes:node1:relationships:[0]')}<block_end><def_stmt>test_relationships_add_source_operation self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' SOURCE_OPERATIONS:{}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' SOURCE_OPERATIONS:{'full.operation1':{}}}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=OPERATION entity_id='nodes:node1:relationships:[0]:'<concat>'source_operations:full.operation1')]<block_end><def_stmt>test_relationships_remove_source_operation 
self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' SOURCE_OPERATIONS:{'full.operation1':{}}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' SOURCE_OPERATIONS:{}}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=OPERATION entity_id='nodes:node1:relationships:[0]:'<concat>'source_operations:full.operation1')]<block_end><def_stmt>test_duplicate_relationship self<block_start>rel={'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' }<line_sep>nodes=[self._get_node_scheme(relationships=[rel rel])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[rel rel])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_relationships_modify_source_operation self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' SOURCE_OPERATIONS:{'full.operation1':{'op1_old_field':'op1_field_value'}}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' SOURCE_OPERATIONS:{'full.operation1':{'op1_new_field':'op1_field_value'}}}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=OPERATION entity_id='nodes:node1:relationships:[0]:'<concat>'source_operations:full.operation1')]<block_end><def_stmt>test_relationships_add_target_operation self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' TARGET_OPERATIONS:{}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' TARGET_OPERATIONS:{'full.operation1':{}}}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=OPERATION entity_id='nodes:node1:relationships:[0]:'<concat>'target_operations:full.operation1')]<block_end><def_stmt>test_relationships_remove_target_operation self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' TARGET_OPERATIONS:{'full.operation1':{}}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' TARGET_OPERATIONS:{}}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='remove' entity_type=OPERATION entity_id='nodes:node1:relationships:[0]:'<concat>'target_operations:full.operation1')]<block_end><def_stmt>test_relationships_modify_target_operation self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' 
TARGET_OPERATIONS:{'full.operation1':{'op1_old_field':'op1_field_value'}}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' TARGET_OPERATIONS:{'full.operation1':{'op1_new_field':'op1_field_value'}}}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=OPERATION entity_id='nodes:node1:relationships:[0]:'<concat>'target_operations:full.operation1')]<block_end><def_stmt>test_get_matching_relationship self<block_start>relationships_with_match=[{'type':'typeA' 'target_id':'id_1' 'field2':'value2'} {'type':'typeB' 'target_id':'id_1'} {'type':'typeB' 'target_id':'id_2'} {'type':'typeA' 'target_id':'id_2'}]<line_sep>relationships_with_no_match=[{'type':'typeB' 'target_id':'id_1'} {'type':'typeB' 'target_id':'id_2'} {'type':'typeA' 'target_id':'id_2'}]<assert_stmt>_find_relationship(relationships_with_match 'typeA' 'id_1')<eq>({'type':'typeA' 'target_id':'id_1' 'field2':'value2'} 0)<assert_stmt>_find_relationship(relationships_with_no_match 'typeA' 'id_1')<eq>(<none> <none>)<block_end><def_stmt>test_sort_steps_compare_action self<block_start>add_step=DeploymentUpdateStep(action='add' entity_type='' entity_id='')<line_sep>remove_step=DeploymentUpdateStep(action='remove' entity_type='' entity_id='')<line_sep>modify_step=DeploymentUpdateStep(action='modify' entity_type='' entity_id='')<line_sep>steps=[add_step remove_step modify_step]<line_sep>expected_step_order=[remove_step add_step modify_step]<line_sep>steps.sort()<assert_stmt>steps<eq>expected_step_order<block_end><def_stmt>test_sort_steps_add_node_before_add_relationship self<block_start>add_node_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='')<line_sep>add_relationship_step=DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='')<line_sep>steps=[add_relationship_step add_node_step]<line_sep>expected_step_order=[add_node_step add_relationship_step]<line_sep>steps.sort()<assert_stmt>steps<eq>expected_step_order<block_end><def_stmt>test_sort_steps_remove_relationship_before_remove_node self<block_start>remove_relationship_step=DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='')<line_sep>remove_node_step=DeploymentUpdateStep(action='remove' entity_type=NODE entity_id='')<line_sep>steps=[remove_node_step remove_relationship_step]<line_sep>expected_step_order=[remove_relationship_step remove_node_step]<line_sep>steps.sort()<assert_stmt>steps<eq>expected_step_order<block_end><def_stmt>test_sort_steps_higher_topology_before_lower_topology self<block_start>default_topology_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='')<line_sep>topology_order_1_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='' topology_order=1)<line_sep>topology_order_2_step=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='' topology_order=2)<line_sep>steps=[topology_order_1_step default_topology_step topology_order_2_step]<line_sep>expected_step_order=[topology_order_2_step topology_order_1_step default_topology_step]<line_sep>steps.sort()<assert_stmt>steps<eq>expected_step_order<block_end><def_stmt>test_sort_steps_all_comparison_considerations self<block_start>add_node_step_default_topology=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='')<line_sep>add_node_step_topology_order_1=DeploymentUpdateStep(action='add' entity_type=NODE 
entity_id='' topology_order=1)<line_sep>add_node_step_topology_order_2=DeploymentUpdateStep(action='add' entity_type=NODE entity_id='' topology_order=2)<line_sep>remove_relationship_step=DeploymentUpdateStep(action='remove' entity_type=RELATIONSHIP entity_id='')<line_sep>remove_node_step=DeploymentUpdateStep(action='remove' entity_type=NODE entity_id='')<line_sep>add_relationship_step=DeploymentUpdateStep(action='add' entity_type=RELATIONSHIP entity_id='')<line_sep>modify_property_step=DeploymentUpdateStep(action='modify' entity_type=PROPERTY entity_id='')<line_sep>steps=[add_node_step_topology_order_1 remove_node_step modify_property_step add_relationship_step add_node_step_default_topology remove_relationship_step add_node_step_topology_order_2]<line_sep>expected_step_order=[remove_relationship_step remove_node_step add_node_step_topology_order_2 add_node_step_topology_order_1 add_node_step_default_topology add_relationship_step modify_property_step]<line_sep>steps.sort()<assert_stmt>steps<eq>expected_step_order<block_end><def_stmt>test_relationships_intact_property self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' PROPERTIES:{'property1':'property1_value'}}])]<line_sep>self.deployment_plan[NODES]=nodes<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<block_end><def_stmt>test_relationships_add_property self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' 'properties':{}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' PROPERTIES:{'property1':'property1_different_value'}}])]<line_sep>_,unsupported_steps=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='add' entity_type=PROPERTY entity_id='nodes:node1:relationships:[0]:'<concat>'properties:property1' supported=<false>)]<block_end><def_stmt>test_relationships_remove_property self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' PROPERTIES:{'property1':'property1_different_value'}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' 'properties':{}}])]<line_sep>_,unsupported_steps=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='remove' entity_type=PROPERTY entity_id='nodes:node1:relationships:[0]:'<concat>'properties:property1' supported=<false>)]<block_end><def_stmt>test_relationships_modify_property self<block_start>nodes=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' PROPERTIES:{'property1':'property1_value'}}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(relationships=[{'type':'relationship_type' 'type_hierarchy':['rel_hierarchy'] 'target_id':'relationship_target' PROPERTIES:{'property1':'property1_different_value'}}])]<line_sep>_,unsupported_steps=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' 
entity_type=PROPERTY entity_id='nodes:node1:relationships:[0]:'<concat>'properties:property1' supported=<false>)]<block_end><def_stmt>test_extract_steps_policy_types_no_change self<block_start>policy_types={'policy_type1':'policy_type1_value'}<line_sep>self.deployment.policy_types=policy_types<line_sep>self.deployment_plan[POLICY_TYPES]=policy_types<line_sep>steps,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<assert_stmt>unsupported_steps<eq>[]<block_end><def_stmt>test_policy_types_add_policy_type self<block_start>self.deployment_plan[POLICY_TYPES]={'policy_type1':'policy_type1_value'}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='add' entity_type=POLICY_TYPE entity_id='policy_types:policy_type1' supported=<false>)]<block_end><def_stmt>test_policy_types_remove_policy_type self<block_start>self.deployment.policy_types={'policy_type1':'policy_type1_value'}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='remove' entity_type=POLICY_TYPE entity_id='policy_types:policy_type1' supported=<false>)]<block_end><def_stmt>test_policy_types_modify_policy_type self<block_start>self.deployment.policy_types={'policy_type1':'policy_type1_value'}<line_sep>self.deployment_plan[POLICY_TYPES]={'policy_type1':'policy_type1_modified_value'}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' entity_type=POLICY_TYPE entity_id='policy_types:policy_type1' supported=<false>)]<block_end><def_stmt>test_extract_steps_policy_triggers_no_change self<block_start>policy_triggers={'policy_trigger1':'policy_trigger1_value'}<line_sep>self.deployment.policy_triggers=policy_triggers<line_sep>self.deployment_plan[POLICY_TRIGGERS]=policy_triggers<line_sep>steps,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<assert_stmt>unsupported_steps<eq>[]<block_end><def_stmt>test_policy_triggers_add_policy_trigger self<block_start>self.deployment_plan[POLICY_TRIGGERS]={'policy_trigger1':'policy_trigger1_value'}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='add' entity_type=POLICY_TRIGGER entity_id='policy_triggers:policy_trigger1' supported=<false>)]<block_end><def_stmt>test_policy_triggers_remove_policy_trigger self<block_start>self.deployment.policy_triggers={'policy_trigger1':'policy_trigger1_value'}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='remove' entity_type=POLICY_TRIGGER entity_id='policy_triggers:policy_trigger1' supported=<false>)]<block_end><def_stmt>test_policy_triggers_modify_policy_trigger self<block_start>self.deployment.policy_triggers={'policy_trigger1':'policy_trigger1_value'}<line_sep>self.deployment_plan[POLICY_TRIGGERS]={'policy_trigger1':'policy_trigger1_modified_value'}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' entity_type=POLICY_TRIGGER entity_id='policy_triggers:policy_trigger1' supported=<false>)]<block_end><def_stmt>test_groups_no_change 
self<block_start>groups={'group1':{}}<line_sep>self.deployment.groups=groups<line_sep>self.deployment_plan[GROUPS]=groups<line_sep>steps,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<assert_stmt>unsupported_steps<eq>[]<block_end><def_stmt>test_groups_add_group self<block_start>self.deployment_plan[GROUPS]={'group1':{}}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='add' entity_type=GROUP entity_id='groups:group1' supported=<false>)]<block_end><def_stmt>test_groups_remove_group self<block_start>self.deployment.groups={'group1':{}}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='remove' entity_type=GROUP entity_id='groups:group1' supported=<false>)]<block_end><def_stmt>test_groups_modify_group self<block_start>self.deployment.groups={'group1':{'members':[]}}<line_sep>self.deployment_plan[GROUPS]={'group1':{'members':['a']}}<line_sep>_,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>unsupported_steps<eq>[DeploymentUpdateStep(action='modify' entity_type=GROUP entity_id='groups:group1' supported=<false>)]<block_end><def_stmt>test_groups_member_order self<block_start>self.deployment.groups={'group1':{'members':['a' 'b']}}<line_sep>self.deployment_plan[GROUPS]={'group1':{'members':['b' 'a']}}<line_sep>steps,unsupported_steps=extract_steps({} self.deployment self.deployment_plan)<assert_stmt>steps<eq>[]<assert_stmt>unsupported_steps<eq>[]<block_end><def_stmt>test_ha_plugins_no_install self<block_start>nodes=[self._get_node_scheme(plugins_to_install=[{'name':'old' 'install':<true>}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(plugins_to_install=[{'name':'new' 'install':<false>}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<line_sep># Although install is set to False on the new plugin, we are still # creating the step. 
We won't need to install the plugin (the # PluginHandler takes care of that), but the value still needs to be # updated in the node in the DB <assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=PLUGIN entity_id='plugins_to_install:node1:new')]<block_end><def_stmt>test_ha_plugins_add_ha_plugin self<block_start>nodes=[self._get_node_scheme(plugins_to_install=[{'name':'old' 'install':<true>}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(plugins_to_install=[{'name':'new' 'install':<true>}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='add' entity_type=PLUGIN entity_id='plugins_to_install:node1:new' supported=<true>)]<block_end><def_stmt>test_ha_plugins_modify_ha_plugin self<block_start>nodes=[self._get_node_scheme(plugins_to_install=[{'name':'name' 'executor':'host_agent' 'install':<true> 'source':'old'}])]<line_sep>self.deployment_plan[NODES]=[self._get_node_scheme(plugins_to_install=[{'name':'name' 'executor':'host_agent' 'install':<true> 'source':'new'}])]<line_sep>steps,_=extract_steps(nodes self.deployment self.deployment_plan)<assert_stmt>steps<eq>[DeploymentUpdateStep(action='modify' entity_type=PLUGIN entity_id='plugins_to_install:node1:name' supported=<true>)]<block_end><def_stmt>test_all_changes_combined self<block_start>path_before=get_resource('deployment_update/combined_changes_before.json')<line_sep>path_after=get_resource('deployment_update/combined_changes_after.json')<with_stmt>open(path_before)<as>fp_before open(path_after)<as>fp_after<block_start>plan_before=json.load(fp_before)<line_sep>plan_after=json.load(fp_after)<block_end>nodes=list(plan_before['nodes'].values())<line_sep>plan_after['nodes']=list(plan_after['nodes'].values())<line_sep>self.deployment.groups=plan_before['groups']<line_sep>self.deployment.workflows=plan_before['workflows']<line_sep>self.deployment.policy_types=plan_before['policy_types']<line_sep>self.deployment.policy_triggers=plan_before['policy_triggers']<line_sep>self.deployment.outputs=plan_before['outputs']<line_sep>expected_steps={'modify_description':DeploymentUpdateStep('modify' DESCRIPTION 'description') 'remove_node':DeploymentUpdateStep('remove' NODE 'nodes:node1') 'add_node':DeploymentUpdateStep('add' NODE 'nodes:node2' topology_order=0) 'modify_node_changed_type':DeploymentUpdateStep('modify' NODE 'nodes:node3' supported=<false>) 'add_property':DeploymentUpdateStep('add' PROPERTY 'nodes:node4:properties:added_prop') 'remove_property':DeploymentUpdateStep('remove' PROPERTY 'nodes:node4:properties:removed_prop') 'modify_property':DeploymentUpdateStep('modify' PROPERTY 'nodes:node4:properties:modified_prop') 'remove_relationship':DeploymentUpdateStep('remove' RELATIONSHIP 'nodes:node6:relationships:[0]') 'add_relationship':DeploymentUpdateStep('add' RELATIONSHIP 'nodes:node7:relationships:[0]') 'remove_relationship_changed_target':DeploymentUpdateStep('remove' RELATIONSHIP 'nodes:node9:relationships:[0]') 'add_relationship_changed_target':DeploymentUpdateStep('add' RELATIONSHIP 'nodes:node9:relationships:[0]') 'remove_relationship_changed_type_and_target':DeploymentUpdateStep('remove' RELATIONSHIP 'nodes:node10:relationships:[0]') 'add_relationship_changed_type_and_target':DeploymentUpdateStep('add' RELATIONSHIP 'nodes:node10:relationships:[0]') 'add_operation':DeploymentUpdateStep('add' OPERATION 'nodes:node11:operations:interface1.added_operation') 'add_operation_shortened':DeploymentUpdateStep('add' OPERATION 
'nodes:node11:operations:added_operation') 'remove_operation':DeploymentUpdateStep('remove' OPERATION 'nodes:node11:operations:interface1.removed_operation') 'remove_operation_shortened':DeploymentUpdateStep('remove' OPERATION 'nodes:node11:operations:removed_operation') 'modify_operation':DeploymentUpdateStep('modify' OPERATION 'nodes:node11:operations:interface1.modified_operation') 'modify_operation_shortened':DeploymentUpdateStep('modify' OPERATION 'nodes:node11:operations:modified_operation') 'add_relationship_operation':DeploymentUpdateStep('add' OPERATION 'nodes:node12:relationships:[0]:target_operations:'<concat>'interface_for_modified_and_added.added_operation') 'add_relationship_operation_shortened':DeploymentUpdateStep('add' OPERATION 'nodes:node12:relationships:[0]:target_operations:'<concat>'added_operation') 'remove_relationship_operation':DeploymentUpdateStep('remove' OPERATION 'nodes:node12:relationships:[0]:source_operations:'<concat>'interface_for_intact_and_removed.removed_operation') 'remove_relationship_operation_shortened':DeploymentUpdateStep('remove' OPERATION 'nodes:node12:relationships:[0]:source_operations:'<concat>'removed_operation') 'modify_relationship_operation':DeploymentUpdateStep('modify' OPERATION 'nodes:node12:relationships:[0]:target_operations:'<concat>'interface_for_modified_and_added.modified_operation') 'modify_relationship_operation_shortened':DeploymentUpdateStep('modify' OPERATION 'nodes:node12:relationships:[0]:target_operations:'<concat>'modified_operation') 'add_output':DeploymentUpdateStep('add' OUTPUT 'outputs:added_output') 'remove_output':DeploymentUpdateStep('remove' OUTPUT 'outputs:removed_output') 'modify_output':DeploymentUpdateStep('modify' OUTPUT 'outputs:modified_output') 'add_workflow_same_plugin':DeploymentUpdateStep('add' WORKFLOW 'workflows:added_workflow_same_plugin') 'add_workflow_new_plugin':DeploymentUpdateStep('add' WORKFLOW 'workflows:added_workflow_new_plugin') 'remove_workflow':DeploymentUpdateStep('remove' WORKFLOW 'workflows:removed_workflow') 'modify_workflow_same_plugin':DeploymentUpdateStep('modify' WORKFLOW 'workflows:modified_workflow_same_plugin') 'modify_workflow_new_plugin':DeploymentUpdateStep('modify' WORKFLOW 'workflows:modified_workflow_new_plugin') 'add_policy_type':DeploymentUpdateStep('add' POLICY_TYPE 'policy_types:added_policy_type' supported=<false>) 'remove_policy_type':DeploymentUpdateStep('remove' POLICY_TYPE 'policy_types:removed_policy_type' supported=<false>) 'modify_policy_type':DeploymentUpdateStep('modify' POLICY_TYPE 'policy_types:modified_policy_type' supported=<false>) 'add_policy_trigger':DeploymentUpdateStep('add' POLICY_TRIGGER 'policy_triggers:added_policy_trigger' supported=<false>) 'remove_policy_trigger':DeploymentUpdateStep('remove' POLICY_TRIGGER 'policy_triggers:removed_policy_trigger' supported=<false>) 'modify_policy_trigger':DeploymentUpdateStep('modify' POLICY_TRIGGER 'policy_triggers:modified_policy_trigger' supported=<false>) 'add_group':DeploymentUpdateStep('add' GROUP 'groups:added_group' supported=<false>) 'remove_group':DeploymentUpdateStep('remove' GROUP 'groups:removed_group' supported=<false>) 'modify_group':DeploymentUpdateStep('modify' GROUP 'groups:modified_group' supported=<false>) 'add_relationship_property':DeploymentUpdateStep('add' PROPERTY 'nodes:node13:relationships:[0]:'<concat>'properties:added_relationship_prop' supported=<false>) 'remove_relationship_property':DeploymentUpdateStep('remove' PROPERTY 
'nodes:node13:relationships:[0]:'<concat>'properties:removed_relationship_prop' supported=<false>) 'modify_relationship_property':DeploymentUpdateStep('modify' PROPERTY 'nodes:node13:relationships:[0]:'<concat>'properties:modified_relationship_prop' supported=<false>) 'add_ha_plugin_plugins_to_install':DeploymentUpdateStep('add' PLUGIN 'plugins_to_install:node18:plugin3_name') 'add_ha_plugin_plugin3_name':DeploymentUpdateStep('add' PLUGIN 'plugins:node18:plugin3_name') 'add_cda_plugin_used_by_host':DeploymentUpdateStep('add' PLUGIN 'plugins:node16:cda_plugin_for_operations2') # the steps below are intended just to make the test pass. # ideally, they should be removed since they are incorrect 'modify_node_add_contained_in_relationship':DeploymentUpdateStep('modify' NODE 'nodes:node8' supported=<false>) 'add_cda_operation':DeploymentUpdateStep('add' OPERATION 'nodes:node16:operations:'<concat>'interface_for_plugin_based_operations.'<concat>'added_operation_new_cda_plugin' supported=<true>) 'add_cda_operation_shortened':DeploymentUpdateStep('add' OPERATION 'nodes:node16:operations:added_operation_new_cda_plugin' supported=<true>) 'add_ha_operation':DeploymentUpdateStep('add' OPERATION 'nodes:node17:operations:'<concat>'interface_for_plugin_based_operations.'<concat>'ha_operation_after' supported=<true>) 'add_ha_operation_shortened':DeploymentUpdateStep('add' OPERATION 'nodes:node17:operations:ha_operation_after' supported=<true>) 'remove_ha_operation':DeploymentUpdateStep('remove' OPERATION 'nodes:node17:operations:'<concat>'interface_for_plugin_based_operations.'<concat>'ha_operation_before' supported=<true>) 'remove_ha_operation_shortened':DeploymentUpdateStep('remove' OPERATION 'nodes:node17:operations:ha_operation_before' supported=<true>) 'modify_ha_operation':DeploymentUpdateStep('modify' OPERATION 'nodes:node18:operations:'<concat>'interface_for_plugin_based_operations.'<concat>'ha_operation_before' supported=<true>) 'modify_ha_operation_shortened':DeploymentUpdateStep('modify' OPERATION 'nodes:node18:operations:ha_operation_before' supported=<true>)}<line_sep>steps,unsupported_steps=extract_steps(nodes self.deployment plan_after)<line_sep>steps.extend(unsupported_steps)<line_sep>self.assertEqual(set(expected_steps.values()) set(steps))<block_end><block_end>
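The sort-order tests above pin down a total ordering for update steps: removals sort first (relationship removals ahead of node removals), additions follow (node additions ahead of relationship additions, with higher topology_order handled earlier), and modifications come last. The snippet below is a readable sketch of a sort key that reproduces exactly the orderings these tests assert; it assumes simplified step objects with action, entity_type and a topology_order defaulting to 0, and is not the production DeploymentUpdateStep comparison code.

# Illustrative only: a sort key matching the ordering asserted by the tests above.
def step_sort_key(step):
    action_rank = {'remove': 0, 'add': 1, 'modify': 2}[step.action]
    if step.action == 'remove':
        # relationships must be removed before the nodes that own them
        entity_rank = 0 if step.entity_type == 'relationship' else 1
    else:
        # nodes must exist before relationships that point at them
        entity_rank = 0 if step.entity_type == 'node' else 1
    # higher topology_order is processed first, hence the negation
    return (action_rank, entity_rank, -step.topology_order)

# usage: steps.sort(key=step_sort_key)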
"""Basic NeoPixel LED animations for the QT Py."""<import_stmt>time<import_stmt>board<import_from_stmt>rainbowio colorwheel<import_stmt>neopixel<line_sep># Update this to match the pin to which you connected the NeoPixels pixel_pin=board.A3<line_sep># Update this to match the number of NeoPixels connected num_pixels=30<line_sep>pixels=neopixel.NeoPixel(pixel_pin num_pixels auto_write=<false>)<line_sep># Set to 0-1 to change the brightness of the NeoPixels pixels.brightness=0.2<def_stmt>blink color wait<block_start>"""Blink animation. Blinks all pixels."""<line_sep>pixels.fill(color)<line_sep>pixels.show()<line_sep>time.sleep(wait)<line_sep>pixels.fill((0 0 0))<line_sep>pixels.show()<line_sep>time.sleep(wait)<block_end><def_stmt>chase color spacing=3 iteration_step=1<block_start>"""Theatre chase animation. Chases across all pixels."""<if_stmt>spacing<l>2<block_start><raise>ValueError("Spacing must be greater than 1 to show chase pattern.")<block_end># Use modulo division to create the spacing between pixels. chase_pixel=iteration_step%spacing<line_sep># Loop over pixels and turn on expected pixels to provided color. <for_stmt>pixel range(0 len(pixels) spacing)# If the pixel is outside the total pixel range, break. <block_start><if_stmt>pixel+chase_pixel<g>len(pixels)-1<block_start><break><block_end>pixels[pixel+chase_pixel]=color<block_end>pixels.show()<line_sep># Loop over pixels and turn off expected pixels. <for_stmt>pixel range(0 len(pixels) spacing)# If the pixel is outside the total pixel range, break. <block_start><if_stmt>pixel+chase_pixel<g>len(pixels)-1<block_start><break><block_end>pixels[pixel+chase_pixel]=(0 0 0)<block_end><block_end><def_stmt>color_wipe color wait<block_start>"""Color wipe animation. Wipes across all pixels."""<for_stmt>pixel range(num_pixels)<block_start>pixels[pixel]=color<line_sep>time.sleep(wait)<line_sep>pixels.show()<block_end>time.sleep(0.5)<block_end><def_stmt>rainbow_cycle wait<block_start>"""Rainbow cycle animation. Cycles across all pixels."""<for_stmt>color_index range(255)<block_start><for_stmt>pixel range(num_pixels)<block_start>pixel_index=(pixel<times>256<floordiv>num_pixels)+color_index<line_sep>pixels[pixel]=colorwheel(pixel_index&255)<block_end>pixels.show()<line_sep>time.sleep(wait)<block_end><block_end>RED=(255 0 0)<line_sep>YELLOW=(255 150 0)<line_sep>GREEN=(0 255 0)<line_sep>CYAN=(0 255 255)<line_sep>BLUE=(0 0 255)<line_sep>PURPLE=(180 0 255)<while_stmt><true># Blink 5 times. Increase or decrease the range for more or less blinking. <block_start><for_stmt>blinks range(5)<block_start>blink(RED 0.5)<block_end># Increase number to slow down blinking, decrease to speed up. # Chase. Increase or decrease the range for longer or shorter chase animation. <for_stmt>step range(50)<block_start>chase(PURPLE spacing=4 iteration_step=step)<line_sep>time.sleep(0.05)<block_end># Fill all pixels. pixels.fill(RED)<line_sep>pixels.show()<line_sep># Increase or decrease the time to change the speed of the solid color change in seconds. time.sleep(0.5)<line_sep>pixels.fill(GREEN)<line_sep>pixels.show()<line_sep>time.sleep(0.5)<line_sep>pixels.fill(BLUE)<line_sep>pixels.show()<line_sep>time.sleep(0.5)<line_sep># Color wipe. color_wipe(YELLOW 0.01)# Increase the number to slow down the color chase. color_wipe(CYAN 0.01)<line_sep>color_wipe(PURPLE 0.01)<line_sep># Rainbow cycle. rainbow_cycle(0)<block_end># Increase the number to slow down the rainbow.
##-***************************************************************************** ## ## Copyright (c) 2009-2011, ## <NAME>, Inc. and ## Industrial Light & Magic, a division of Lucasfilm Entertainment Company Ltd. ## ## All rights reserved. ## ## Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are ## met: ## * Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. ## * Redistributions in binary form must reproduce the above ## copyright notice, this list of conditions and the following disclaimer ## in the documentation and/or other materials provided with the ## distribution. ## * Neither the name of Sony Pictures Imageworks, nor ## Industrial Light & Magic nor the names of their contributors may be used ## to endorse or promote products derived from this software without specific ## prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT ## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, ## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT ## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, ## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY ## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT ## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE ## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ## ##-***************************************************************************** <import_from_stmt>maya cmds<as>MayaCmds<import_stmt>maya.OpenMaya<as>OpenMaya<import_stmt>os<import_stmt>unittest<import_stmt>util<class_stmt>MayaReloadTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>MayaCmds.file(new=<true> force=<true>)<line_sep>self.__files=[]<block_end><def_stmt>tearDown self<block_start><for_stmt>f self.__files<block_start>os.remove(f)<block_end><block_end># this test makes sure that not just the vertex positions but the # connection info is all correct <def_stmt>testAnimMeshReload self<block_start>MayaCmds.polyCube(name='mesh')<line_sep>MayaCmds.setKeyframe('meshShape.vtx[0:7]' time=[1 24])<line_sep>MayaCmds.setKeyframe('meshShape.vtx[0:7]')<line_sep>MayaCmds.currentTime(12 update=<true>)<line_sep>MayaCmds.select('meshShape.vtx[0:7]')<line_sep>MayaCmds.scale(5 5 5 r=<true>)<line_sep>MayaCmds.setKeyframe('meshShape.vtx[0:7]' time=[12])<line_sep>self.__files.append(util.expandFileName('testAnimMeshReadWrite.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root mesh -f '+self.__files[-1])<line_sep># reading test MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep># save as a maya file self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># reload as a maya file MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>retVal=<true><line_sep>mesh1='|mesh|meshShape'<line_sep>mesh2='|mesh1|meshShape'<for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareMesh(mesh1 mesh2)<block_start>self.fail('%s and %s were not equal at frame %d'%(mesh1 
mesh2 t))<block_end><block_end><block_end>#------------------------------------------------------------------------------- # The following tests each creates four animated nodes of the same data # type, writes out to Abc file, loads back the file and deletes one node. # Then the scene is saved as a Maya file, and load back to check if the # reload works as expected #------------------------------------------------------------------------------- <def_stmt>testAnimPolyDeleteReload self# create a poly cube and animate <block_start>shapeName='pCube'<line_sep>MayaCmds.polyCube(name=shapeName)<line_sep>MayaCmds.move(5 0 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[2:5]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[2:5]' replace=<true>)<line_sep>MayaCmds.move(0 4 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[2:5]' time=[12])<line_sep># create a poly sphere and animate shapeName='pSphere'<line_sep>MayaCmds.polySphere(name=shapeName)<line_sep>MayaCmds.move(-5 0 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:379]' shapeName+'.vtx[381]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[200:379]' shapeName+'.vtx[381]' replace=<true>)<line_sep>MayaCmds.scale(0.5 0.5 0.5 relative=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:379]' shapeName+'.vtx[381]' time=[12])<line_sep>MayaCmds.currentTime(1)<line_sep># create a poly torus and animate shapeName='pTorus'<line_sep>MayaCmds.polyTorus(name=shapeName)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:219]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[200:219]' replace=<true>)<line_sep>MayaCmds.scale(2 1 2 relative=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:219]' time=[12])<line_sep># create a poly cone and animate shapeName='pCone'<line_sep>MayaCmds.polyCone(name=shapeName)<line_sep>MayaCmds.move(0 0 -5 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[20]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[20]' replace=<true>)<line_sep>MayaCmds.move(0 4 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[20]' time=[12])<line_sep># write it out to Abc file and load back in self.__files.append(util.expandFileName('testPolyReload.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root pCube -root pSphere -root pTorus -root pCone -file %s'%self.__files[-1])<line_sep># load back the Abc file, delete the sphere and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep>MayaCmds.delete('pSphere')<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('pCube' 'pTorus' 'pCone' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>shapeList=MayaCmds.ls(type='mesh')<line_sep>self.failUnlessEqual(len(shapeList) 7)<line_sep>meshes=[('|pCube|pCubeShape' '|ReloadGrp|pCube|pCubeShape') ('|pTorus|pTorusShape' '|ReloadGrp|pTorus|pTorusShape') ('|pCone|pConeShape' '|ReloadGrp|pCone|pConeShape')]<for_stmt>m meshes<block_start><for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareMesh(m[0] m[1])<block_start>self.fail('%s and %s are not the same at frame %d'%(m[0] m[1] 
t))<block_end><block_end><block_end><block_end><def_stmt>testAnimSubDDeleteReload self# create a subD cube and animate <block_start>shapeName='cube'<line_sep>MayaCmds.polyCube(name=shapeName)<line_sep>MayaCmds.select('cubeShape')<line_sep>MayaCmds.addAttr(longName='SubDivisionMesh' attributeType='bool' defaultValue=<true>)<line_sep>MayaCmds.move(5 0 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[2:5]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[2:5]' replace=<true>)<line_sep>MayaCmds.move(0 4 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[2:5]' time=[12])<line_sep># create a subD sphere and animate shapeName='sphere'<line_sep>MayaCmds.polySphere(name=shapeName)<line_sep>MayaCmds.select('sphereShape')<line_sep>MayaCmds.addAttr(longName='SubDivisionMesh' attributeType='bool' defaultValue=<true>)<line_sep>MayaCmds.move(-5 0 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:379]' shapeName+'.vtx[381]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[200:379]' shapeName+'.vtx[381]' replace=<true>)<line_sep>MayaCmds.scale(0.5 0.5 0.5 relative=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:379]' shapeName+'.vtx[381]' time=[12])<line_sep>MayaCmds.currentTime(1)<line_sep># create a subD torus and animate shapeName='torus'<line_sep>MayaCmds.polyTorus(name=shapeName)<line_sep>MayaCmds.select('torusShape')<line_sep>MayaCmds.addAttr(longName='SubDivisionMesh' attributeType='bool' defaultValue=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:219]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[200:219]' replace=<true>)<line_sep>MayaCmds.scale(2 1 2 relative=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[200:219]' time=[12])<line_sep># create a subD cone and animate shapeName='cone'<line_sep>MayaCmds.polyCone(name=shapeName)<line_sep>MayaCmds.select('coneShape')<line_sep>MayaCmds.addAttr(longName='SubDivisionMesh' attributeType='bool' defaultValue=<true>)<line_sep>MayaCmds.move(0 0 -5 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[20]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[20]' replace=<true>)<line_sep>MayaCmds.move(0 4 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[20]' time=[12])<line_sep>self.__files.append(util.expandFileName('testSubDReload.abc'))<line_sep># write it out to Abc file and load back in MayaCmds.AbcExport(j='-fr 1 24 -root cube -root sphere -root torus -root cone -file '+self.__files[-1])<line_sep># load back the Abc file, delete the sphere and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep>MayaCmds.delete('sphere')<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('cube' 'torus' 'cone' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>shapeList=MayaCmds.ls(type='mesh')<line_sep>self.failUnlessEqual(len(shapeList) 7)<line_sep># test the equality of cubes meshes=[('|cube|cubeShape' '|ReloadGrp|cube|cubeShape') ('|torus|torusShape' '|ReloadGrp|torus|torusShape') ('|cone|coneShape' '|ReloadGrp|cone|coneShape')]<for_stmt>m meshes<block_start><for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t 
update=<true>)<if_stmt><not>util.compareMesh(m[0] m[1])<block_start>self.fail('%s and %s are not the same at frame %d'%(m[0] m[1] t))<block_end><block_end><block_end><block_end><def_stmt>testAnimNSurfaceDeleteReload self# create an animated Nurbs sphere <block_start>MayaCmds.sphere(ch=<false> name='nSphere')<line_sep>MayaCmds.move(5 0 0 relative=<true>)<line_sep>MayaCmds.select('nSphere.cv[0:1][0:7]' 'nSphere.cv[5:6][0:7]' replace=<true>)<line_sep>MayaCmds.setKeyframe(time=[1 24])<line_sep>MayaCmds.currentTime(12 update=<true>)<line_sep>MayaCmds.scale(1.5 1 1 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=12)<line_sep># create an animated Nurbs torus MayaCmds.torus(ch=<false> name='nTorus')<line_sep>MayaCmds.move(-5 0 0 relative=<true>)<line_sep>MayaCmds.select('nTorus.cv[0][0:7]' 'nTorus.cv[2][0:7]' replace=<true>)<line_sep>MayaCmds.setKeyframe(time=[1 24])<line_sep>MayaCmds.currentTime(12 update=<true>)<line_sep>MayaCmds.scale(1 2 2 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=12)<line_sep># create an animated Nurbs plane # should add the trim curve test on this surface, will be easier # than the rest MayaCmds.nurbsPlane(ch=<false> name='nPlane')<line_sep>MayaCmds.move(-5 5 0 relative=<true>)<line_sep>MayaCmds.select('nPlane.cv[0:3][0:3]' replace=<true>)<line_sep>MayaCmds.setKeyframe(time=1)<line_sep>MayaCmds.currentTime(12 update=<true>)<line_sep>MayaCmds.rotate(0 0 90 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=12)<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.rotate(0 0 90 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=24)<line_sep># create an animated Nurbs cylinder MayaCmds.cylinder(ch=<false> name='nCylinder')<line_sep>MayaCmds.select('nCylinder.cv[0][0:7]' replace=<true>)<line_sep>MayaCmds.setKeyframe(time=[1 24])<line_sep>MayaCmds.currentTime(12 update=<true>)<line_sep>MayaCmds.move(-3 0 0 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=12)<line_sep># write it out to Abc file and load back in self.__files.append(util.expandFileName('testNSurfaceReload.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root nSphere -root nTorus -root nPlane -root nCylinder -file '+self.__files[-1])<line_sep># load back the Abc file, delete the torus and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep>MayaCmds.delete('nTorus')<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('nSphere' 'nPlane' 'nCylinder' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>surfaceList=MayaCmds.ls(type='nurbsSurface')<line_sep>self.failUnlessEqual(len(surfaceList) 7)<line_sep>surfaces=[('|nSphere|nSphereShape' '|ReloadGrp|nSphere|nSphereShape') ('|nPlane|nPlaneShape' '|ReloadGrp|nPlane|nPlaneShape') ('|nCylinder|nCylinderShape' '|ReloadGrp|nCylinder|nCylinderShape')]<for_stmt>s surfaces<block_start><for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareNurbsSurface(s[0] s[1])<block_start>self.fail('%s and %s are not the same at frame %d'%(s[0] s[1] t))<block_end><block_end><block_end><block_end><def_stmt>testAnimNSurfaceAndPolyDeleteReload self# create a poly cube and animate <block_start>shapeName='pCube'<line_sep>MayaCmds.polyCube(name=shapeName)<line_sep>MayaCmds.move(5 0 0 
r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[2:5]' time=[1 24])<line_sep>MayaCmds.currentTime(12)<line_sep>MayaCmds.select(shapeName+'.vtx[2:5]' replace=<true>)<line_sep>MayaCmds.move(0 4 0 r=<true>)<line_sep>MayaCmds.setKeyframe(shapeName+'.vtx[2:5]' time=[12])<line_sep># create an animated Nurbs plane MayaCmds.nurbsPlane(ch=<false> name='nPlane')<line_sep>MayaCmds.move(-5 5 0 relative=<true>)<line_sep>MayaCmds.select('nPlane.cv[0:3][0:3]' replace=<true>)<line_sep>MayaCmds.setKeyframe(time=1)<line_sep>MayaCmds.currentTime(12 update=<true>)<line_sep>MayaCmds.rotate(0 0 90 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=12)<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.rotate(0 0 90 relative=<true>)<line_sep>MayaCmds.setKeyframe(time=24)<line_sep># write it out to Abc file and load back in self.__files.append(util.expandFileName('testNSurfaceAndPolyReload.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root pCube -root nPlane -file '+self.__files[-1])<line_sep># load back the Abc file, delete the cube and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep>MayaCmds.delete('pCube')<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('nPlane' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>shapeList=MayaCmds.ls(type='mesh')<line_sep>self.failUnlessEqual(len(shapeList) 1)<line_sep>surfaceList=MayaCmds.ls(type='nurbsSurface')<line_sep>self.failUnlessEqual(len(surfaceList) 2)<line_sep># test the equality of plane surface1='|nPlane|nPlaneShape'<line_sep>surface2='|ReloadGrp|nPlane|nPlaneShape'<for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareNurbsSurface(surface1 surface2)<block_start>self.fail('%s and %s are not the same at frame %d'%(surface1 surface2 t))<block_end><block_end><block_end><def_stmt>testAnimCameraDeleteReload self# cam1 <block_start>MayaCmds.camera(name='cam1')<line_sep>MayaCmds.setAttr('cam1Shape1.horizontalFilmAperture' 0.962)<line_sep>MayaCmds.setAttr('cam1Shape1.verticalFilmAperture' 0.731)<line_sep>MayaCmds.setAttr('cam1Shape1.focalLength' 50)<line_sep>MayaCmds.setAttr('cam1Shape1.focusDistance' 5)<line_sep>MayaCmds.setAttr('cam1Shape1.shutterAngle' 144)<line_sep>MayaCmds.setAttr('cam1Shape1.centerOfInterest' 1384.825)<line_sep># cam2 MayaCmds.duplicate('cam1' returnRootsOnly=<true>)<line_sep># cam3 MayaCmds.duplicate('cam1' returnRootsOnly=<true>)<line_sep># cam4 MayaCmds.duplicate('cam1' returnRootsOnly=<true>)<line_sep># animate each camera slightly different MayaCmds.currentTime(1 update=<true>)<line_sep>MayaCmds.setKeyframe('cam1Shape1' attribute='horizontalFilmAperture')<line_sep>MayaCmds.setKeyframe('cam2Shape' attribute='focalLength')<line_sep>MayaCmds.setKeyframe('cam3Shape' attribute='focusDistance')<line_sep>MayaCmds.setKeyframe('cam4Shape' attribute='shutterAngle')<line_sep>MayaCmds.setKeyframe('cam4Shape' attribute='centerOfInterest')<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.setKeyframe('cam1Shape1' attribute='horizontalFilmAperture' value=0.95)<line_sep>MayaCmds.setKeyframe('cam2Shape' attribute='focalLength' value=40)<line_sep>MayaCmds.setKeyframe('cam3Shape' attribute='focusDistance' 
value=5.4)<line_sep>MayaCmds.setKeyframe('cam4Shape' attribute='shutterAngle' value=174.94)<line_sep>MayaCmds.setKeyframe('cam4Shape' attribute='centerOfInterest' value=67.418)<line_sep># write them out to an Abc file and load back in self.__files.append(util.expandFileName('testCamReload.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root cam1 -root cam2 -root cam3 -root cam4 -file '+self.__files[-1])<line_sep># load back the Abc file, delete the 2nd camera and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep>MayaCmds.delete('cam2')<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('cam1' 'cam3' 'cam4' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>camList=MayaCmds.ls(type='camera')<line_sep># should be 7, but this query will return the four standard cameras in # the scene too self.failUnlessEqual(len(camList) 11)<line_sep># test the equality of cameras cameras=[('|cam1|cam1Shape1' '|ReloadGrp|cam1|cam1Shape1') ('|cam3|cam3Shape' '|ReloadGrp|cam3|cam3Shape') ('|cam4|cam4Shape' '|ReloadGrp|cam4|cam4Shape')]<for_stmt>c cameras<block_start><for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareCamera(c[0] c[1])<block_start>self.fail('%s and %s are not the same at frame %d'%(c[0] c[1] t))<block_end><block_end><block_end><block_end><def_stmt>testAnimNCurvesDeleteReload self# create some animated curves <block_start>MayaCmds.textCurves(ch=<false> t='Maya' name='Curves' font='Courier')<line_sep>MayaCmds.currentTime(1 update=<true>)<line_sep>MayaCmds.select('curve1.cv[0:27]' 'curve2.cv[0:45]' 'curve3.cv[0:15]' 'curve4.cv[0:19]' 'curve5.cv[0:45]' 'curve6.cv[0:15]' replace=<true>)<line_sep>MayaCmds.setKeyframe()<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.select('curve1.cv[0:27]' replace=<true>)<line_sep>MayaCmds.move(-3 3 0 relative=<true>)<line_sep>MayaCmds.select('curve2.cv[0:45]' 'curve3.cv[0:15]' replace=<true>)<line_sep>MayaCmds.scale(1.5 1.5 1.5 relative=<true>)<line_sep>MayaCmds.select('curve4.cv[0:19]' replace=<true>)<line_sep>MayaCmds.move(1.5 0 0 relative=<true>)<line_sep>MayaCmds.rotate(0 90 0 relative=<true>)<line_sep>MayaCmds.select('curve5.cv[0:45]' 'curve6.cv[0:15]' replace=<true>)<line_sep>MayaCmds.move(3 0 0 relative=<true>)<line_sep>MayaCmds.select('curve1.cv[0:27]' 'curve2.cv[0:45]' 'curve3.cv[0:15]' 'curve4.cv[0:19]' 'curve5.cv[0:45]' 'curve6.cv[0:15]' replace=<true>)<line_sep>MayaCmds.setKeyframe()<line_sep># write them out to an Abc file and load back in self.__files.append(util.expandFileName('testNCurvesReload.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root CurvesShape -file '+self.__files[-1])<line_sep># load back the Abc file, delete the 2nd letter and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep># delete letter "a" which has two curves MayaCmds.delete('Char_a_1')<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('CurvesShape' 
replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>curveList=MayaCmds.ls(type='nurbsCurve')<line_sep>self.failUnlessEqual(len(curveList) 10)<line_sep># test the equality of curves curves=[('|CurvesShape|Char_M_1|curve1|curveShape1' '|ReloadGrp|CurvesShape|Char_M_1|curve1|curveShape1') ('|CurvesShape|Char_y_1|curve4|curveShape4' '|ReloadGrp|CurvesShape|Char_y_1|curve4|curveShape4') ('|CurvesShape|Char_a_2|curve5|curveShape5' '|ReloadGrp|CurvesShape|Char_a_2|curve5|curveShape5') ('|CurvesShape|Char_a_2|curve6|curveShape6' '|ReloadGrp|CurvesShape|Char_a_2|curve6|curveShape6')]<for_stmt>c curves<block_start><for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareNurbsCurve(c[0] c[1])<block_start>self.fail('%s and %s are not the same at frame %d'%(c[0] c[1] t))<block_end><block_end><block_end><block_end>#------------------------------------------------------------------------- <def_stmt>testAnimNCurveGrpDeleteReload self# create an animated curves group <block_start>MayaCmds.textCurves(ch=<false> t='haka' name='Curves' font='Courier')<line_sep>MayaCmds.addAttr(longName='riCurves' at='bool' dv=<true>)<line_sep>MayaCmds.currentTime(1 update=<true>)<line_sep>MayaCmds.select('curve1.cv[0:27]' 'curve2.cv[0:45]' 'curve3.cv[0:15]' 'curve4.cv[0:19]' 'curve5.cv[0:45]' 'curve6.cv[0:15]' replace=<true>)<line_sep>MayaCmds.setKeyframe()<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.select('curve1.cv[0:27]' replace=<true>)<line_sep>MayaCmds.move(-3 3 0 relative=<true>)<line_sep>MayaCmds.select('curve2.cv[0:45]' 'curve3.cv[0:15]' replace=<true>)<line_sep>MayaCmds.scale(1.5 1.5 1.5 relative=<true>)<line_sep>MayaCmds.select('curve4.cv[0:19]' replace=<true>)<line_sep>MayaCmds.move(1.5 0 0 relative=<true>)<line_sep>MayaCmds.rotate(0 90 0 relative=<true>)<line_sep>MayaCmds.select('curve5.cv[0:45]' 'curve6.cv[0:15]' replace=<true>)<line_sep>MayaCmds.move(3 0 0 relative=<true>)<line_sep>MayaCmds.select('curve1.cv[0:27]' 'curve2.cv[0:45]' 'curve3.cv[0:15]' 'curve4.cv[0:19]' 'curve5.cv[0:45]' 'curve6.cv[0:15]' replace=<true>)<line_sep>MayaCmds.setKeyframe()<line_sep># write them out to an Abc file and load back in self.__files.append(util.expandFileName('testNCurveGrpReload.abc'))<line_sep>MayaCmds.AbcExport(j='-fr 1 24 -root CurvesShape -file '+self.__files[-1])<line_sep># load back the Abc file, delete the 2nd letter and save to a maya file MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep># delete letter "a" which has two curves, but as a curve group. 
# the curve shapes are renamed under the group node MayaCmds.delete('CurvesShape1')<line_sep>MayaCmds.delete('CurvesShape2')<line_sep>self.__files.append(util.expandFileName('testCurves.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('|CurvesShape' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep>curveList=MayaCmds.ls(type='nurbsCurve')<line_sep>self.failUnlessEqual(len(curveList) 10)<line_sep>curves=[('|CurvesShape|CurvesShape' '|ReloadGrp|CurvesShape|CurvesShape') ('|CurvesShape|CurvesShape8' '|ReloadGrp|CurvesShape|CurvesShape3') ('|CurvesShape|CurvesShape9' '|ReloadGrp|CurvesShape|CurvesShape4') ('|CurvesShape|CurvesShape10' '|ReloadGrp|CurvesShape|CurvesShape5')]<for_stmt>c curves<block_start><for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<if_stmt><not>util.compareNurbsCurve(c[0] c[1])<block_start>self.fail('%s and %s are not the same at frame %d'%(c[0] c[1] t))<block_end><block_end><block_end><block_end><def_stmt>testAnimPropDeleteReload self# create some animated properties on a transform node ( could be any type ) <block_start>nodeName=MayaCmds.polyPrism(ch=<false> name='prism')<line_sep>MayaCmds.addAttr(longName='SPT_int8' defaultValue=0 attributeType='byte' keyable=<true>)<line_sep>MayaCmds.addAttr(longName='SPT_int16' defaultValue=100 attributeType='short' keyable=<true>)<line_sep>MayaCmds.addAttr(longName='SPT_int32' defaultValue=1000 attributeType='long' keyable=<true>)<line_sep>MayaCmds.addAttr(longName='SPT_float' defaultValue=0.57777777 attributeType='float' keyable=<true>)<line_sep>MayaCmds.addAttr(longName='SPT_double' defaultValue=5.0456435 attributeType='double' keyable=<true>)<line_sep>MayaCmds.currentTime(1 update=<true>)<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_int8')<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_int16')<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_int32')<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_float')<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_double')<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_int8' value=8)<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_int16' value=16)<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_int32' value=32)<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_float' value=5.24847)<line_sep>MayaCmds.setKeyframe(nodeName attribute='SPT_double' value=3.14154)<line_sep># create SPT_HWColor on the shape node MayaCmds.select('prismShape')<line_sep>MayaCmds.addAttr(longName='SPT_HwColorR' defaultValue=1.0 minValue=0.0 maxValue=1.0)<line_sep>MayaCmds.addAttr(longName='SPT_HwColorG' defaultValue=1.0 minValue=0.0 maxValue=1.0)<line_sep>MayaCmds.addAttr(longName='SPT_HwColorB' defaultValue=1.0 minValue=0.0 maxValue=1.0)<line_sep>MayaCmds.addAttr(longName='SPT_HwColor' usedAsColor=<true> attributeType='float3')<line_sep>MayaCmds.currentTime(1 update=<true>)<line_sep>MayaCmds.setKeyframe(at='SPT_HwColorR')<line_sep>MayaCmds.setKeyframe(at='SPT_HwColorG')<line_sep>MayaCmds.setKeyframe(at='SPT_HwColorB')<line_sep>MayaCmds.currentTime(24 update=<true>)<line_sep>MayaCmds.setKeyframe(at='SPT_HwColorR' value=0.5)<line_sep>MayaCmds.setKeyframe(at='SPT_HwColorG' value=0.15)<line_sep>MayaCmds.setKeyframe(at='SPT_HwColorB' 
value=0.75)<line_sep># write them out to an Abc file and load back in self.__files.append(util.expandFileName('testPropReload.abc'))<line_sep>MayaCmds.AbcExport(j='-atp SPT_ -fr 1 24 -root prism -file '+self.__files[-1])<line_sep># load back the Abc file, delete the 2nd letter and save to a maya file abcNode=MayaCmds.AbcImport(self.__files[-1] mode='open')<line_sep># delete connections to animated props prop=MayaCmds.listConnections('|prism.SPT_float' p=<true>)[0]<line_sep>MayaCmds.disconnectAttr(prop '|prism.SPT_float')<line_sep>attr='|prism|prismShape.SPT_HwColorG'<line_sep>prop=MayaCmds.listConnections(attr p=<true>)[0]<line_sep>MayaCmds.disconnectAttr(prop attr)<line_sep>self.__files.append(util.expandFileName('test.mb'))<line_sep>MayaCmds.file(rename=self.__files[-1])<line_sep>MayaCmds.file(save=<true>)<line_sep># import the saved maya file to compare with the original scene MayaCmds.file(self.__files[-1] open=<true>)<line_sep>MayaCmds.select('prism' replace=<true>)<line_sep>MayaCmds.group(name='ReloadGrp')<line_sep>MayaCmds.AbcImport(self.__files[-2] mode='import')<line_sep># test the equality of props <for_stmt>t range(1 25)<block_start>MayaCmds.currentTime(t update=<true>)<line_sep>self.failUnlessEqual(MayaCmds.getAttr('|prism.SPT_int8') MayaCmds.getAttr('|ReloadGrp|prism.SPT_int8') 'prism.SPT_int8 not equal')<line_sep>self.failUnlessEqual(MayaCmds.getAttr('|prism.SPT_int16') MayaCmds.getAttr('|ReloadGrp|prism.SPT_int16') 'prism.SPT_int16 not equal')<line_sep>self.failUnlessEqual(MayaCmds.getAttr('|prism.SPT_int32') MayaCmds.getAttr('|ReloadGrp|prism.SPT_int32') 'prism.SPT_int32 not equal')<line_sep>self.failUnlessAlmostEqual(MayaCmds.getAttr('|prism.SPT_double') MayaCmds.getAttr('|ReloadGrp|prism.SPT_double') 4 'prism.SPT_double not equal')<line_sep>self.failUnlessAlmostEqual(MayaCmds.getAttr('|prism|prismShape.SPT_HwColorR') MayaCmds.getAttr('|ReloadGrp|prism|prismShape.SPT_HwColorR') 4 'prismShape.SPT_HwColorR not equal')<line_sep>self.failUnlessAlmostEqual(MayaCmds.getAttr('|prism|prismShape.SPT_HwColorB') MayaCmds.getAttr('|ReloadGrp|prism|prismShape.SPT_HwColorB') 4 'prismShape.SPT_HwColorB not equal')<block_end><block_end><block_end>
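Every reload test above repeats the same round-trip skeleton: export the animated roots with AbcExport, reopen the Alembic archive, delete one node, save and reopen a Maya scene, group the survivors under ReloadGrp, re-import the archive, then compare original and re-imported shapes frame by frame. The recurring per-frame comparison loop can be read in isolation as the sketch below; it simply factors out that loop, assuming pairs of shape paths plus one of the util.compare* helpers already imported by this module.

# Sketch of the per-frame comparison loop repeated in the tests above;
# 'pairs' holds (original_shape_path, reloaded_shape_path) tuples and
# 'compare' is one of the util.compare* helpers (e.g. util.compareMesh).
def compareReloadedShapes(pairs, compare, frames=range(1, 25)):
    for original, reloaded in pairs:
        for t in frames:
            MayaCmds.currentTime(t, update=True)
            if not compare(original, reloaded):
                raise AssertionError(
                    '%s and %s are not the same at frame %d' % (original, reloaded, t))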
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Binary for WikiHop eval."""<import_stmt>json<import_from_stmt>typing Any Dict List Text Tuple<import_stmt>numpy<as>np<import_stmt>tensorflow.compat.v1<as>tf<import_from_stmt>etcmodel.models tokenization<import_from_stmt>etcmodel.models.wikihop data_utils<line_sep>tf.compat.v1.disable_v2_behavior()<line_sep>flags=tf.flags<line_sep>FLAGS=flags.FLAGS<line_sep># Populate these constants appropriately at the time of submission. MODEL_PATH="105/"<line_sep>SPM_MODEL_VOCAB="vocab_gpt.model"<line_sep><class_stmt>WikiHopInference(object)<block_start>"""WikiHop for inference / prediction using SavedModel."""<def_stmt>__init__ self model_dir_path:Text session_target:Text<block_start>"""Loads the WikiHop from an exported `tf.SavedModel`. Args: model_dir_path: Path to the exported directory of the model. session_target: The session target. """<line_sep>self.sess=tf.Session(graph=tf.Graph() target=session_target)<line_sep># Loads the saved model (graph + variables) to the given session. graph_def=tf.saved_model.load(self.sess tags=[tf.saved_model.tag_constants.SERVING] export_dir=model_dir_path)<line_sep>signature=graph_def.signature_def[tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]<line_sep>self.input_tensor_name=signature.inputs["serialized_tf_example"].name<line_sep>self.logits_tensor_name=signature.outputs["logits"].name<block_end><def_stmt>predict self serialized_tf_examples:List[Text]<arrow>List[List[List[float]]]<block_start>"""Retrieves logits for the given list of serialized tf examples. Args: serialized_tf_examples: Batched input serialized_tf_examples. Returns: A List[List[float]] representing the logits. Each entry i in the list corresponds to the result of the i-th serialized_tf_example. """<line_sep>feed_dict={self.input_tensor_name:serialized_tf_examples}<line_sep>logits=self.sess.run([self.logits_tensor_name] feed_dict=feed_dict)<line_sep><return>logits<block_end><block_end><def_stmt>get_serialized_tf_example wikihop_example:data_utils.WikiHopExample tokenizer:tokenization.FullTokenizer long_seq_len:int=4096 global_seq_len:int=430 max_num_sentences:int=200<arrow>Text<block_start>"""Returns serialized TF example from the given json example."""<line_sep>converter=data_utils.WikiHopTFExampleConverter(tokenizer=tokenizer long_seq_len=long_seq_len global_seq_len=global_seq_len max_num_sentences=max_num_sentences)<line_sep>tf_example=converter.convert_single_example(example=wikihop_example)<line_sep><return>tf_example.SerializeToString()<block_end><def_stmt>get_predicted_answer wikihop_example:data_utils.WikiHopExample logits:List[float] global_seq_len:int=430<arrow>Text<block_start>"""Returns predicted answer for the given example and its logits."""<line_sep><assert_stmt>len(logits)<eq>global_seq_len ("Mismatch in logits len.
Expected: {}, found: {}, logits are: {} ".format(global_seq_len len(logits) logits))<line_sep>logits=logits[0:len(wikihop_example.candidate_answers)]<line_sep>max_label_index=np.argmax(logits)<assert_stmt>max_label_index<ge>0<and>(max_label_index<l>len(wikihop_example.candidate_answers))<line_sep>answer=wikihop_example.candidate_answers[max_label_index]<line_sep>answer=answer.lower().strip()<line_sep><return>answer<block_end><def_stmt>get_output_single_example tokenizer:tokenization.FullTokenizer wikihop_inference:WikiHopInference json_obj:Dict[Text Any] long_seq_len:int=4096 global_seq_len:int=430 max_num_sentences:int=200<arrow>Tuple[Text Text]<block_start>"""Generates output for a single example."""<line_sep>wikihop_example=data_utils.WikiHopExample.from_json(single_example=json_obj)<line_sep>serialized_tf_example=get_serialized_tf_example(wikihop_example=wikihop_example tokenizer=tokenizer long_seq_len=long_seq_len global_seq_len=global_seq_len max_num_sentences=max_num_sentences)<line_sep>logits=wikihop_inference.predict([serialized_tf_example])[0][0]<assert_stmt>len(logits)<eq>global_seq_len ("Mismatch in logits len. Expected: {}, found: {} for example_id: {}. "<concat>"Actual logits are: {}".format(global_seq_len len(logits) wikihop_example.example_id logits))<line_sep>answer=get_predicted_answer(wikihop_example=wikihop_example logits=logits global_seq_len=global_seq_len)<line_sep><return>(wikihop_example.example_id answer)<block_end><def_stmt>generate_eval_output_bulk json_examples:List[Dict[Text Any]] model_dir_path:Text tokenizer:tokenization.FullTokenizer long_seq_len:int=4096 global_seq_len:int=430 max_num_sentences:int=200 batch_size:int=4 session_target:Text=""<arrow>Dict[Text Any]<block_start>"""Bulk mode inference."""<line_sep>serialized_tf_examples=[]<line_sep>wikihop_examples=[]<line_sep>output={}<for_stmt>json_obj json_examples<block_start>wikihop_example=data_utils.WikiHopExample.from_json(single_example=json_obj)<line_sep>wikihop_examples.append(wikihop_example)<line_sep>serialize_tf_example=get_serialized_tf_example(wikihop_example=wikihop_example tokenizer=tokenizer long_seq_len=long_seq_len global_seq_len=global_seq_len max_num_sentences=max_num_sentences)<line_sep>serialized_tf_examples.append(serialize_tf_example)<block_end>wikihop_inference=WikiHopInference(model_dir_path=model_dir_path session_target=session_target)<line_sep>index=0<line_sep>num_examples=len(serialized_tf_examples)<line_sep># Note that we are getting "all" the serialized examples and then "batching" # only for prediction. The bottleneck is almost always going to be the # GPU anyway (for both memory and compute). <while_stmt>index<l>num_examples<block_start>predict_batch=serialized_tf_examples[index:min(index+batch_size num_examples)]<line_sep>batch_logits=wikihop_inference.predict(predict_batch)[0]<for_stmt>(offset logits) enumerate(batch_logits)<block_start>answer=get_predicted_answer(wikihop_example=wikihop_examples[index+offset] logits=logits global_seq_len=global_seq_len)<line_sep>output[wikihop_examples[index+offset].example_id]=answer<block_end>index<augadd>batch_size<block_end><return>output<block_end><def_stmt>generate_eval_output json_examples:List[Dict[Text Any]] tokenizer:tokenization.FullTokenizer model_dir_path:Text long_seq_len:int=4096 global_seq_len:int=430 max_num_sentences:int=200 batch_inference:bool=<false> batch_size:int=4 session_target:Text=""<arrow>Dict[Text Any]<block_start>"""Generates output for the input json.
Returns the dict output key'ed by the example_id, with the value being the answer string. Args: json_examples: List of examples loaded from json input file. tokenizer: The BERT or ALBERT tokenizer. model_dir_path: The path to the directory containing the SavedModel. long_seq_len: The long input. global_seq_len: The global input. max_num_sentences: The max num sentences to be used per example. batch_inference: If True, we batch together all the examples at once for faster inference. Given that there are only 1K test examples, we might be able to fit everything in memory (500K per example * 1K). batch_size: Number of examples to be batched in one to predict. Applicable only when `batch_inference` is set to True. session_target: The TF session target. Returns: Dict[Text, Text] key'ed by the example_id to the corresponding prediction answer. """<line_sep>output={}<if_stmt>batch_inference<block_start><return>generate_eval_output_bulk(json_examples=json_examples model_dir_path=model_dir_path tokenizer=tokenizer long_seq_len=long_seq_len global_seq_len=global_seq_len max_num_sentences=max_num_sentences batch_size=batch_size session_target=session_target)<block_end>wikihop_inference=WikiHopInference(model_dir_path=model_dir_path session_target=session_target)<for_stmt>json_obj json_examples<block_start>(example_id label)=get_output_single_example(tokenizer=tokenizer wikihop_inference=wikihop_inference json_obj=json_obj long_seq_len=long_seq_len global_seq_len=global_seq_len max_num_sentences=max_num_sentences)<line_sep>output[example_id]=label<block_end><return>output<block_end><def_stmt>main argv<block_start><if_stmt>len(argv)<ne>3<block_start><raise>tf.app.UsageError("Exactly two arguments expected.")<block_end>input_json_filepath=argv[1].strip()<line_sep>output_json_filepath=argv[2].strip()<line_sep>tokenizer=tokenization.FullTokenizer(vocab_file=<none> do_lower_case=<none> spm_model_file=SPM_MODEL_VOCAB)<with_stmt>tf.gfile.Open(input_json_filepath "r")<as>test_data<block_start>json_examples=json.load(test_data)<block_end>predictions=generate_eval_output(tokenizer=tokenizer json_examples=json_examples model_dir_path=MODEL_PATH)<with_stmt>tf.gfile.GFile(output_json_filepath "w")<as>output_writer<block_start>json.dump(predictions output_writer)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>tf.app.run()<block_end>
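main() above expects exactly two positional arguments after the script name, the input and output JSON paths, with the model export directory and SentencePiece vocab hard-coded in MODEL_PATH and SPM_MODEL_VOCAB. A hedged usage sketch follows; the script and data file names are placeholders, and the programmatic call simply mirrors main() while opting into the batched inference path described in the docstring.

# Command-line use (file names are placeholders):
#   python wikihop_eval.py dev.json predictions.json
#
# Equivalent programmatic use, opting into batched inference:
tokenizer = tokenization.FullTokenizer(
    vocab_file=None, do_lower_case=None, spm_model_file=SPM_MODEL_VOCAB)
with tf.gfile.Open("dev.json", "r") as f:
    json_examples = json.load(f)
predictions = generate_eval_output(
    tokenizer=tokenizer, json_examples=json_examples,
    model_dir_path=MODEL_PATH, batch_inference=True, batch_size=4)
with tf.gfile.GFile("predictions.json", "w") as f:
    json.dump(predictions, f)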
""" General IaC constants """<line_sep>PARAMETER_OVERRIDES="parameter_overrides"<line_sep>GLOBAL_PARAMETER_OVERRIDES="global_parameter_overrides"<line_sep>
################################################## ## Set relative file paths ## <import_stmt>csv<import_stmt>sys<import_stmt>os<import_stmt>numpy<as>np<import_stmt>json<line_sep>absFilePath=os.path.abspath(__file__)<line_sep>fileDir=os.path.dirname(os.path.abspath(__file__))<line_sep>parentDir=os.path.dirname(fileDir)<line_sep>newPath=os.path.join(parentDir 'core')<line_sep>sys.path.append(newPath)<import_stmt>sco2_cycle_ssc<as>sco2_solve<import_stmt>sco2_plots<as>cy_plt<line_sep>################################################## ################################################## <def_stmt>get_sco2_design_parameters <block_start>des_par={}<line_sep>des_par["htf"]=17# [-] Solar salt des_par["T_htf_hot_des"]=574.0# [C] HTF design hot temperature (PHX inlet) [cite: sunshot] des_par["dT_PHX_hot_approach"]=20.0# [C/K] default 20. Temperature difference between hot HTF and turbine inlet [cite: neises/turchi] des_par["dT_PHX_cold_approach"]=20# [C/K] default 20. Temperature difference between cold HTF and cold CO2 PHX inlet [enforces CR = 1] des_par["T_amb_des"]=40.0# [C] Ambient temperature at design [cite: neises/turchi] des_par["dT_mc_approach"]=6.0# [C] Use 6 here per [Neises & Turchi 19]. Temperature difference between main compressor CO2 inlet and ambient air des_par["site_elevation"]=588# [m] Elevation of Daggett, CA. Used to size air cooler... des_par["W_dot_net_des"]=115.0# [MWe] Design cycle power output (no cooling parasitics) des_par["design_method"]=3# [-] 1 = specify efficiency, 2 = specify total recup UA, 3 = Specify each recuperator design (see inputs below) des_par["eta_thermal_des"]=-1# [-] Power cycle thermal efficiency, not used here des_par["UA_recup_tot_des"]=-1# [kW/K] des_par["cycle_config"]=1# [1] = RC, [2] = PC des_par["is_recomp_ok"]=1# [-] Use simple cycle for now. 1 = Yes, 0 = simple cycle only des_par["is_P_high_fixed"]=1# [-] 0 = No, optimize. 1 = Yes des_par["is_PR_fixed"]=0# [-] 0 = No, >0 = Yes des_par["des_objective"]=1# [-] 2 = hit min deltaT then max efficiency, != 2 = max efficiency des_par["min_phx_deltaT"]=1000# [C] Min allowable deltaT across PHX des_par["rel_tol"]=3# [-] Baseline solver and optimization relative tolerance exponent (10^-rel_tol) # Weiland & Thimsen 2016 # In most studies, 85% is an accepted isentropic efficiency for either the main or recompression compressors, and is the recommended assumption. des_par["eta_isen_mc"]=0.85# [-] Main compressor isentropic efficiency des_par["eta_isen_rc"]=0.85# [-] Recompressor isentropic efficiency des_par["eta_isen_pc"]=0.85# [-] Precompressor isentropic efficiency # Weiland & Thimsen 2016 # Recommended turbine efficiencies are 90% for axial turbines above 30 MW, and 85% for radial turbines below 30 MW. des_par["eta_isen_t"]=0.90# [-] Turbine isentropic efficiency des_par["P_high_limit"]=25# [MPa] Cycle high pressure limit # <NAME> 2016 # Multiple literature sources suggest that recuperator cold side (high pressure) pressure drop of # approximately 140 kPa (20 psid) and a hot side (low pressure) pressure drop of 280 kPa (40 psid) can be reasonably used. 
# Note: Unclear what the low pressure assumption is in this study, could be significantly lower for direct combustion cycles eff_max=1.0<line_sep>deltaP_recup_HP=0.0056# [-] 0.0056 = 0.14[MPa]/25[MPa] deltaP_recup_LP=0.0311# [-] 0.0311 = 0.28[MPa]/9[MPa] # LTR des_par["LTR_design_code"]=2# 1 = UA, 2 = min dT, 3 = effectiveness des_par["LTR_UA_des_in"]=-1# [kW/K] (required if LTR_design_code == 1 and design_method == 3) not used des_par["LTR_min_dT_des_in"]=10.0<line_sep># [C] (required if LTR_design_code == 2 and design_method == 3) "reasonable value" from Neises/Turchi des_par["LTR_eff_des_in"]=-1# [-] (required if LTR_design_code == 3 and design_method == 3) des_par["LT_recup_eff_max"]=eff_max# [-] Maximum effectiveness low temperature recuperator des_par["LTR_LP_deltaP_des_in"]=deltaP_recup_LP# [-] des_par["LTR_HP_deltaP_des_in"]=deltaP_recup_HP# [-] # HTR des_par["HTR_design_code"]=2# 1 = UA, 2 = min dT, 3 = effectiveness des_par["HTR_UA_des_in"]=-1# [kW/K] (required if LTR_design_code == 1 and design_method == 3) des_par["HTR_min_dT_des_in"]=10.0<line_sep># [C] (required if LTR_design_code == 2 and design_method == 3) "reasonable value" from Neises/Turchi des_par["HTR_eff_des_in"]=-1# [-] (required if LTR_design_code == 3 and design_method == 3) des_par["HT_recup_eff_max"]=eff_max# [-] Maximum effectiveness high temperature recuperator des_par["HTR_LP_deltaP_des_in"]=deltaP_recup_LP# [-] des_par["HTR_HP_deltaP_des_in"]=deltaP_recup_HP# [-] # PHX des_par["PHX_co2_deltaP_des_in"]=deltaP_recup_HP# [-] # Air Cooler des_par["deltaP_cooler_frac"]=0.005<line_sep># [-] Fraction of CO2 inlet pressure that is design point cooler CO2 pressure drop des_par["fan_power_frac"]=0.02<line_sep># [-] Fraction of net cycle power consumed by air cooler fan. 2% here per Turchi et al. 
# Default des_par["deltaP_counterHX_frac"]=-1<line_sep># [-] Fraction of CO2 inlet pressure that is design point counterflow HX (recups & PHX) pressure drop # Off Design des_par["od_rel_tol"]=3# [-] Baseline off-design relative convergence tolerance exponent (10^-od_rel_tol) <return>des_par<block_end><def_stmt>make_udpc_plots_from_json_dict json_file_name<block_start>udpc_dict=json.load(open(json_file_name))<line_sep>print("HTF cold design = "+str(udpc_dict["T_htf_cold_des"])+" C")<line_sep>T_hot_str="HTF Hot Temperature (Design page) = "+str(udpc_dict["T_htf_hot_des"])+" C"<line_sep>T_cold_str="HTF Cold Temperature (Design page) = "+str(udpc_dict["T_htf_cold_des"])+" C"<line_sep>eta_str="Cycle Thermal Efficiency (Design page) = "+str(udpc_dict["eta_thermal_calc"])+" -"<line_sep>T_amb_str="Ambient Temperature (Power Cycle page) = "+str(udpc_dict["T_amb_des"])+" C"<line_sep>W_dot_cool_str="Cooling Parasitic (Power Cycle page) = "+str(udpc_dict["fan_power_frac"])+" -"<line_sep>od_T_t_in_mode=udpc_dict["od_T_t_in_mode"]<line_sep>n_T_htf=int(udpc_dict["udpc_n_T_htf"])<line_sep>n_T_amb=int(udpc_dict["udpc_n_T_amb"])<line_sep>n_m_dot_htf=int(udpc_dict["udpc_n_m_dot_htf"])<line_sep>udpc_data=udpc_dict["udpc_table"]<line_sep>s_cycle_des=T_hot_str+"\n"+T_cold_str+"\n"+eta_str+"\n"+T_amb_str+"\n"+W_dot_cool_str+"\n"<line_sep>cy_plt.plot_udpc_results(udpc_data n_T_htf n_T_amb n_m_dot_htf "updc_data_read" s_cycle_des od_T_t_in_mode)<block_end>###################################### ###################################### "Generate data for SAM's User Defined Power Cycle Model"<line_sep># Instantiate sco2 cycle simulation class c_sco2=sco2_solve.C_sco2_sim(1)# Initialize as same cycle config as specified above # Get default design parameters. These are different than the "baseline" default parameters in "sco2_cycle_ssc.py" sco2_des_par_default=get_sco2_design_parameters()<line_sep>c_sco2.overwrite_default_design_parameters(sco2_des_par_default)<line_sep># Setup string for naming files des_sim_label_str="T_amb_des"+'{:.1f}'.format(sco2_des_par_default["T_amb_des"])<line_sep>mod_base_dict={"od_generate_udpc":[1.0]}<line_sep>mod_base_dict["od_rel_tol"]=2<line_sep>c_sco2.overwrite_des_par_base(mod_base_dict)# Overwrite baseline design parameters c_sco2.solve_sco2_case()# Run design simulation print(c_sco2.m_solve_dict["eta_thermal_calc"])<line_sep>print("\nDid the simulation code with "<concat>"modified design parameters solve successfully = " c_sco2.m_solve_success)<line_sep>c_sco2.m_also_save_csv=<true><line_sep>c_sco2.save_m_solve_dict(""+des_sim_label_str+"_UDPC_mspt_default")# Save design solved_dict=c_sco2.m_solve_dict<line_sep>udpc_data=solved_dict["udpc_table"]<line_sep>HTF_cold_str="HTF cold design = "+str(solved_dict["T_htf_cold_des"])+" C"<line_sep>T_co2_in_str="CO2 PHX in Temp design = "+str(solved_dict["T_co2_PHX_in"])+" C"<line_sep>P_co2_in_str="CO2 PHX in Pressure design = "+str(solved_dict["P_co2_PHX_in"])+" MPa"<line_sep>T_turb_str="CO2 Turbine in Temp design = "+str(solved_dict["T_turb_in"])+" C"<line_sep>P_turb_str="CO2 Turbine in Pressure design = "+str(solved_dict["t_P_in_des"])+" MPa"<line_sep>eta_str="Cycle Thermal Efficiency (Design page) = "+str(solved_dict["eta_thermal_calc"])+" -"<line_sep>T_amb_str="Ambient Temperature (Power Cycle page) = "+str(solved_dict["T_amb_des"])+" C"<line_sep>W_dot_cool_str="Cooling Parasitic (Power Cycle page) = "+str(solved_dict["fan_power_frac"])+" -"<line_sep>#SSC_OUTPUT, SSC_MATRIX, "udpc_table", "Columns (7): HTF Temp [C], HTF ND mass flow [-], 
Ambient Temp [C], ND Power, ND Heat In, ND Fan Power, ND Water. Rows = runs" <with_stmt>open("udpc_outputs"+'.csv' 'w' newline='')<as>f<block_start>w=csv.writer(f)<line_sep>w.writerows(solved_dict["udpc_table"])<block_end>f.close()<line_sep>n_T_htf=int(solved_dict["udpc_n_T_htf"])<line_sep>n_T_amb=int(solved_dict["udpc_n_T_amb"])<line_sep>n_m_dot_htf=int(solved_dict["udpc_n_m_dot_htf"])<line_sep>s_cycle_des=HTF_cold_str+"\n"+T_co2_in_str+"\n"+P_co2_in_str+"\n"+T_turb_str+"\n"+P_turb_str+"\n"+eta_str+"\n"+T_amb_str+"\n"+W_dot_cool_str+"\n"<line_sep>cy_plt.plot_udpc_results(udpc_data n_T_htf n_T_amb n_m_dot_htf "updc_mspt_default" s_cycle_des)<line_sep>
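# A minimal sketch of reading the UDPC table back from udpc_outputs.csv, assuming the
# column order stated in the SSC_OUTPUT comment above (HTF Temp [C], HTF ND mass flow [-],
# Ambient Temp [C], ND Power, ND Heat In, ND Fan Power, ND Water); the variable names
# below are assumptions for illustration, not part of the model output.
import numpy as np

udpc = np.genfromtxt("udpc_outputs.csv", delimiter=",")
T_htf_hot = udpc[:, 0]        # HTF temperature [C]
m_dot_htf_nd = udpc[:, 1]     # HTF non-dimensional mass flow [-]
T_amb_od = udpc[:, 2]         # ambient temperature [C]
W_dot_nd = udpc[:, 3]         # non-dimensional net power
q_dot_nd = udpc[:, 4]         # non-dimensional heat input
W_fan_nd = udpc[:, 5]         # non-dimensional fan power
m_dot_water_nd = udpc[:, 6]   # non-dimensional water use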
<import_stmt>paddle<import_stmt>paddle.nn<as>nn<import_stmt>paddlenlp<import_from_stmt>functools partial<import_from_stmt>paddlenlp.datasets MapDataset<import_from_stmt>paddlenlp.data Stack Tuple Pad<import_from_stmt>paddlenlp.layers LinearChainCrf ViterbiDecoder LinearChainCrfLoss<import_from_stmt>paddlenlp.metrics ChunkEvaluator<import_from_stmt>utils load_dict evaluate predict parse_decodes1 parse_decodes2<import_from_stmt>paddlenlp.transformers ErnieTokenizer ErnieForTokenClassification ErnieGramTokenizer ErnieGramForTokenClassification<import_from_stmt>utils convert_example<def_stmt>load_dataset datafiles<block_start><def_stmt>read data_path<block_start><with_stmt>open(data_path 'r' encoding='utf-8')<as>fp<block_start>next(fp)<for_stmt>line fp.readlines()<block_start>words,labels=line.strip('\n').split('\t')<line_sep>words=words.split('\002')<line_sep>labels=labels.split('\002')<line_sep><yield>words labels<block_end><block_end><block_end><if_stmt>isinstance(datafiles str)<block_start><return>MapDataset(list(read(datafiles)))<block_end><elif_stmt>isinstance(datafiles list)<or>isinstance(datafiles tuple)<block_start><return>[MapDataset(list(read(datafile)))<for>datafile datafiles]<block_end><block_end>train_ds,dev_ds,test_ds=load_dataset(datafiles=('./waybill_data/train.txt' './waybill_data/dev.txt' './waybill_data/test.txt'))<line_sep>label_vocab=load_dict('./conf/tag.dic')<line_sep># Set the name of the model to use MODEL_NAME="ernie-1.0"<line_sep>tokenizer=ErnieTokenizer.from_pretrained(MODEL_NAME)<line_sep>trans_func=partial(convert_example tokenizer=tokenizer label_vocab=label_vocab)<line_sep>train_ds.map(trans_func)<line_sep>dev_ds.map(trans_func)<line_sep>test_ds.map(trans_func)<line_sep>ignore_label=-1<line_sep>batchify_fn=<lambda>samples fn=Tuple(Pad(axis=0 pad_val=tokenizer.pad_token_id) # input_ids Pad(axis=0 pad_val=tokenizer.pad_token_type_id) # token_type_ids Stack() # seq_len Pad(axis=0 pad_val=ignore_label)# labels ):fn(samples)<line_sep>train_loader=paddle.io.DataLoader(dataset=train_ds batch_size=200 return_list=<true> collate_fn=batchify_fn)<line_sep>dev_loader=paddle.io.DataLoader(dataset=dev_ds batch_size=200 return_list=<true> collate_fn=batchify_fn)<line_sep>test_loader=paddle.io.DataLoader(dataset=test_ds batch_size=200 return_list=<true> collate_fn=batchify_fn)<line_sep># Define the model network and its loss model=ErnieForTokenClassification.from_pretrained("ernie-1.0" num_classes=len(label_vocab))<line_sep>metric=ChunkEvaluator(label_list=label_vocab.keys() suffix=<true>)<line_sep>loss_fn=paddle.nn.loss.CrossEntropyLoss(ignore_index=ignore_label)<line_sep>optimizer=paddle.optimizer.AdamW(learning_rate=2e-5 parameters=model.parameters())<line_sep>step=0<for_stmt>epoch range(10)# Switch the model to training mode <block_start>model.train()<for_stmt>idx,(input_ids token_type_ids length labels) enumerate(train_loader)<block_start>logits=model(input_ids token_type_ids)<line_sep>loss=paddle.mean(loss_fn(logits labels))<line_sep>loss.backward()<line_sep>optimizer.step()<line_sep>optimizer.clear_grad()<line_sep>step<augadd>1<line_sep>print("epoch:%d - step:%d - loss: %f"%(epoch step loss))<block_end>evaluate(model metric dev_loader)<line_sep>paddle.save(model.state_dict() './ernie_result/model_%d.pdparams'%step)<block_end># model.save_pretrained('./checkpoint') # tokenizer.save_pretrained('./checkpoint') preds=predict(model test_loader test_ds label_vocab)<line_sep>file_path="ernie_results.txt"<with_stmt>open(file_path "w"
encoding="utf8")<as>fout<block_start>fout.write("\n".join(preds))<block_end># Print some examples print("The results have been saved in the file: %s, some examples are shown below: "%file_path)<line_sep>print("\n".join(preds[:10]))<line_sep>
<import_from_stmt>excel4lib.config *<import_from_stmt>excel4lib.config.excel4_translator_config Excel4MissingTranslationLevel<import_from_stmt>excel4lib.utils *<import_from_stmt>excel4lib.exception *<class_stmt>Excel4Translator(object)<block_start>''' `Excel4Translator` class allows to translate english formulas to another language. `Excel4Translator` stores translation files in the langs directory in .json format. Translation files have the following format: ``` { "arguments_separator": ",", "name": "LANG_NAME", "row_character": "ROW_CHARACTER", "col_character": "COL_CHARACTER", "translation": { "ENG_FORMULA":"TRANSLATION_FORMULA", (...) } } ``` - `arguments_separator` - stores character used to separate formula arguments; - `name` - stores the name of language. It should be the same as the file name, with no extension for example, pl_PL (then file name is pl_pl.json); - `row_character` - stores character used to translate ROW character in RC_STYLE; - `col_character` - stores character used to translate COLUMN character when RC_STYLE is used; - `translation` - stores formulas translations in form KEY:VALUE where KEY is formula in english and VALUE is translation of this formula to corresponding language '''<line_sep># Reference to configuration config=Excel4Config.translator<line_sep># Current language - the language into which the text is to be translated language=config.language<line_sep># Language from which translation is done # By default we use formulas in English # If you want to translate for example from Polish to English, then change Excel4Translator.native_language to pl_PL # and set Excel4Translator.language variable to en_US. Then create file en_US.json as translations file. # If Excel4Translator.language is equal to Excel4Translator.native_language then translation is not done native_language="en_US"<line_sep># Current language translations translations={native_language:{}}<line_sep># Default arguments separator. Returned when arguments_separator key is not defined in translations arguments_separator=","<line_sep># Default characters for rows and cols. row_character="R"<line_sep>col_character="C"<line_sep>@staticmethod<def_stmt>init <block_start>''' Initializes translator and loads `Excel4Translator.language` translation into memory. '''<line_sep>Excel4Translator.load_translations()<block_end>@staticmethod<def_stmt>check_translations <block_start>''' Checks if translations have required keys. If not then `Excel4RequiredKeyMissingException` is raised. '''<line_sep># Do not check if current language is equal to native <if_stmt>Excel4Translator.is_native()<block_start><return><block_end>req=["translation"]<line_sep>translations_path=join_path(Excel4Translator.config.translations_directory Excel4Translator.language+Excel4Translator.config.translations_ext)<for_stmt>k req<block_start><if_stmt>k<not><in>Excel4Translator.translations[Excel4Translator.language]<block_start><raise>Excel4RequiredKeyMissingException("{} key is missing in translations {}".format(k translations_path))<block_end><block_end><block_end>@staticmethod<def_stmt>load_translations lang=<none><block_start>''' Loads translation defined in `lang` into memory. If `lang` is None then `Excel4Translator.language` is loaded. If translation file does not exist or could not be found then `Excel4PathNotExistException` is raiesd. 
'''<line_sep># Do not load if current language is equal to native <if_stmt>(<not>lang)<and>Excel4Translator.is_native()<block_start><return><block_end><if_stmt><not>lang<block_start>lang=Excel4Translator.language<block_end><if_stmt>lang<in>Excel4Translator.translations<block_start><return><block_end>translations_path=join_path(Excel4Translator.config.translations_directory lang+Excel4Translator.config.translations_ext)<line_sep># Check if file with translations exists <if_stmt><not>is_path(translations_path)<block_start><raise>Excel4PathNotExistException("File with translations {} does not exist".format(translations_path))<block_end>Excel4Translator.translations[lang]=load_json_file(translations_path)<line_sep># Check if translations have all required keys Excel4Translator.check_translations()<block_end>@staticmethod<def_stmt>set_language lang<block_start>''' Sets current language (`Excel4Translator.langauge`) to `lang` and loads translation. :param lang: name of the language '''<line_sep># Save current language temp=Excel4Translator.language<line_sep>Excel4Translator.language=lang<try_stmt><block_start>Excel4Translator.load_translations()<block_end><except_stmt>Exception<as>ex# Restore language <block_start>Excel4Translator.language=temp<line_sep><raise>ex<block_end><block_end>@staticmethod<def_stmt>is_native <block_start>''' Checks if `Excel4Translator.language` is equal to `Excel4Translator.native_language` :return: True if yes and False if not '''<line_sep><return>Excel4Translator.language<eq>Excel4Translator.native_language<block_end>@staticmethod<def_stmt>translate formula lang=<none><block_start>''' Translates formula to `lang`. If `lang` is None then current language `Excel4Translator.language` is used. :param formula: name of formula to translate :param lang: name of the language :return: string translated formula '''<line_sep>lang_b=<none><line_sep># Init translations <if_stmt><not>Excel4Translator.translations<block_start>Excel4Translator.init()<block_end># If formula is empty or it contains spaces then do not translate <if_stmt>(<not>formula)<or>(" "<in>formula)<block_start><return>formula<block_end><if_stmt>lang<and>(lang<ne>Excel4Translator.language)<block_start>lang_b=Excel4Translator.language<line_sep>Excel4Translator.set_language(lang)<block_end># Do not translate if current language is equal to native <if_stmt>Excel4Translator.is_native()<block_start><return>formula<block_end><if_stmt><not>Excel4Translator.get_value("translation")<block_start><return><block_end><if_stmt>formula<not><in>Excel4Translator.translations[Excel4Translator.language]["translation"]# Raise exception if translation is missing <block_start><if_stmt>Excel4Translator.config.missing_translation<eq>Excel4MissingTranslationLevel.EXCEPTION<block_start>translations_path=join_path(Excel4Translator.config.translations_directory Excel4Translator.language+Excel4Translator.config.translations_ext)<line_sep><raise>Excel4TranslationMissingException("Translation of {} formula is missing in translations {} file".format(formula translations_path))<block_end># Print if translation is missing <elif_stmt>Excel4Translator.config.missing_translation<eq>Excel4MissingTranslationLevel.LOG<block_start>translations_path=join_path(Excel4Translator.config.translations_directory Excel4Translator.language+Excel4Translator.config.translations_ext)<line_sep>print("[!] 
Translation of {} formula is missing in translations {} file".format(formula translations_path))<block_end><return>formula<block_end>translation_f=Excel4Translator.translations[Excel4Translator.language]["translation"][formula]<if_stmt>lang_b<block_start>Excel4Translator.set_language(lang_b)<block_end><return>translation_f<block_end>@staticmethod<def_stmt>t formula lang=<none><block_start>''' Translates formula to `lang`. If `lang` is None then current language `Excel4Translator.language` is used. :param formula: name of formula to translate :param lang: name of the language :return: string translated formula '''<line_sep><return>Excel4Translator.translate(formula lang)<block_end>@staticmethod<def_stmt>translate_address address<block_start>''' Translates cell address :param address: address of cell to translate in RC_STYLE reference style :return: string translated address '''<line_sep># Init translations <if_stmt><not>Excel4Translator.translations<block_start>Excel4Translator.init()<block_end># Do not translate if current language is equal to native <if_stmt>Excel4Translator.is_native()<block_start><return>address<block_end># Do not translate if reference style is set to A1 <if_stmt><not>Excel4Config.rc_reference_style<block_start><return>address<block_end><return>address.replace(Excel4Translator.row_character Excel4Translator.get_row_character()).replace(Excel4Translator.col_character Excel4Translator.get_col_character())<block_end>@staticmethod<def_stmt>t_a address<block_start>''' Translates cell address :param address: address of cell to translate in RC_STYLE reference style :return: string translated address '''<line_sep><return>Excel4Translator.translate_address(address)<block_end>@staticmethod<def_stmt>get_value key_name<block_start>''' Returns value stored under `key_name` from `Excel4Translator.translations`. If key does not exist then `Excel4RequiredKeyMissingException` is raised. :param key_name: :return: value stored under `key_name` in `Excel4Translator.translations` object '''<if_stmt>key_name<not><in>Excel4Translator.translations[Excel4Translator.language]<block_start>translations_path=join_path(Excel4Translator.config.translations_directory Excel4Translator.language+Excel4Translator.config.translations_ext)<line_sep><raise>Excel4RequiredKeyMissingException("{} key is missing in translations {}".format(key_name translations_path))<block_end><return>Excel4Translator.translations[Excel4Translator.language][key_name]<block_end>@staticmethod<def_stmt>get_arguments_separator lang=<none><block_start>''' Returns arguments separator for `lang`. If `lang` is None then current lanauge is used (`Excel4Translator.language`). :param lang: name of the language '''<if_stmt>(<not>lang)<and>Excel4Translator.is_native()<block_start><return>Excel4Translator.arguments_separator<block_end><if_stmt><not>lang<block_start>lang=Excel4Translator.language<block_end><if_stmt>lang<not><in>Excel4Translator.translations<block_start>Excel4Translator.load_translations(lang)<block_end><return>Excel4Translator.translations[lang].get("arguments_separator" Excel4Translator.arguments_separator)<block_end>@staticmethod<def_stmt>get_row_character lang=<none><block_start>''' Returns row character for `lang`. If `lang` is None then current lanauge is used (`Excel4Translator.language`). 
:param lang: name of the language '''<if_stmt>(<not>lang)<and>Excel4Translator.is_native()<block_start><return>Excel4Translator.row_character<block_end><if_stmt><not>lang<block_start>lang=Excel4Translator.language<block_end><if_stmt>lang<not><in>Excel4Translator.translations<block_start>Excel4Translator.load_translations(lang)<block_end><return>Excel4Translator.translations[lang].get("row_character" Excel4Translator.row_character)<block_end>@staticmethod<def_stmt>get_col_character lang=<none><block_start>''' Returns column character for `lang`. If `lang` is None then current lanauge is used (`Excel4Translator.language`). :param lang: name of the language '''<if_stmt>(<not>lang)<and>Excel4Translator.is_native()<block_start><return>Excel4Translator.col_character<block_end><if_stmt><not>lang<block_start>lang=Excel4Translator.language<block_end><if_stmt>lang<not><in>Excel4Translator.translations<block_start>Excel4Translator.load_translations(lang)<block_end><return>Excel4Translator.translations[lang].get("col_character" Excel4Translator.col_character)<block_end>@staticmethod<def_stmt>get_languages <block_start>''' Returns list of available languages. '''<line_sep>translations_path=Excel4Translator.config.translations_directory<line_sep>langs=[]<for_stmt>l os.listdir(translations_path)<block_start><if_stmt>(Excel4Translator.config.translations_ext<eq>l.lower().split(".")[-1])<or>(Excel4Translator.config.translations_ext<eq>"."+l.lower().split(".")[-1])<block_start>langs.append(".".join(l.split(".")[:-1]))<block_end><block_end><return>langs<block_end><block_end>
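# A minimal sketch of the translation-file format described in the Excel4Translator
# class docstring and of the calls that consume it. The Polish strings and the W/K
# characters below are hypothetical placeholders, not verified localized formula names.
example_pl_translation = {
    "arguments_separator": ";",
    "name": "pl_PL",
    "row_character": "W",
    "col_character": "K",
    "translation": {
        "GOTO": "PRZEJDZ.DO",
        "HALT": "ZATRZYMAJ",
    },
}
# Saved as langs/pl_PL.json, it would be used roughly like this (assuming the file exists):
# Excel4Translator.set_language("pl_PL")
# Excel4Translator.t("GOTO")                  # -> "PRZEJDZ.DO"
# Excel4Translator.get_arguments_separator()  # -> ";"
# Excel4Translator.t_a("R1C2")                # -> "W1K2" when RC reference style is enabled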
<import_from_stmt>django forms<import_from_stmt>mayan.apps.views.forms DetailForm<import_from_stmt>.models UserLocaleProfile<class_stmt>LocaleProfileForm(forms.ModelForm)<block_start><class_stmt>Meta<block_start>fields=('language' 'timezone')<line_sep>model=UserLocaleProfile<line_sep>widgets={'language':forms.Select(attrs={'class':'select2'}) 'timezone':forms.Select(attrs={'class':'select2'})}<block_end><block_end><class_stmt>LocaleProfileForm_view(DetailForm)<block_start><class_stmt>Meta<block_start>fields=('language' 'timezone')<line_sep>model=UserLocaleProfile<block_end><block_end>
<import_stmt>unittest<import_from_stmt>dojo criar_fita main pinta_fita<class_stmt>DojoTest(unittest.TestCase)<block_start><def_stmt>test_main self<block_start>self.assertEqual(main(4 [1]) 3)<block_end><def_stmt>test_main_outro self<block_start>self.assertEqual(main(13 [2 3 6]) 3)<block_end><def_stmt>test_main_outro2 self<block_start>self.assertEqual(main(10 [9 10]) 8)<block_end><def_stmt>test_criar_fita self<block_start>fita=[1 0 0 0]<line_sep>self.assertListEqual(criar_fita(4 [1]) fita)<block_end><def_stmt>test_criar_fita_vazia self<block_start>self.assertListEqual(criar_fita(0 []) [])<block_end><def_stmt>test_criar_fita_3 self<block_start>fita=[1 1 0]<line_sep>self.assertListEqual(criar_fita(3 [1 2]) fita)<block_end><def_stmt>test_pinta_fita self<block_start>fita=[1 1 0]<line_sep>fita_pintada=[1 1 1]<line_sep>self.assertListEqual(pinta_fita(fita) fita_pintada)<block_end><def_stmt>test_pinta_fita2 self<block_start>fita=[1 0 0 0]<line_sep>fita_pintada=[1 1 0 0]<line_sep>self.assertListEqual(pinta_fita(fita) fita_pintada)<block_end><def_stmt>test_pinta_fita3 self<block_start>fita=[0 1 0]<line_sep>fita_pintada=[1 1 1]<line_sep>self.assertListEqual(pinta_fita(fita) fita_pintada)<block_end><block_end># [0,1,0,0,0,1,0,0,0,0,0,1] <if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>#Joao - Ingrid - Lara - Juan - Tiago # n = length of the tape => [0,0,1,0,1] # x,y,z = the positions of the drops # expects the number of days it will take for the tape to be completely black # turn the input into an array # function that paints # while there is some element = 0 in the array # keep painting before and after every 1 # 13 3 # 2 6 13 # 10 2 # 9 10
<import_stmt>numpy<as>np<import_stmt>matplotlib.pyplot<as>plt<line_sep>plt.style.use('fivethirtyeight')<def_stmt>plot_images images targets n_plot=30<block_start>n_rows=n_plot<floordiv>10+((n_plot%10)<g>0)<line_sep>fig,axes=plt.subplots(n_rows 10 figsize=(15 1.5<times>n_rows))<line_sep>axes=np.atleast_2d(axes)<for_stmt>i,(image target) enumerate(zip(images[:n_plot] targets[:n_plot]))<block_start>row,col=i<floordiv>10 i%10<line_sep>ax=axes[row col]<line_sep>ax.set_title('#{} - Label:{}'.format(i target) {'size':12})<line_sep># plot filter channel in grayscale ax.imshow(image.squeeze() cmap='gray' vmin=0 vmax=1)<block_end><for_stmt>ax axes.flat<block_start>ax.set_xticks([])<line_sep>ax.set_yticks([])<line_sep>ax.label_outer()<block_end>plt.tight_layout()<line_sep><return>fig<block_end>
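# A minimal usage sketch for plot_images with random grayscale images; the 1x28x28
# shape is an arbitrary assumption - any array that squeezes to 2D per image works.
if __name__ == '__main__':
    images = np.random.rand(12, 1, 28, 28)
    targets = np.random.randint(0, 10, size=12)
    fig = plot_images(images, targets, n_plot=12)
    plt.show()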
<import_stmt>os<import_stmt>sys<import_stmt>h5py<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_stmt>networkx<as>nx<import_from_stmt>convert make_adjacency make_sparse_adjacency save_problem spadj2edgelist<line_sep>np.random.seed(123)<def_stmt>load_ages path<block_start>ages=pd.read_csv(path header=<none> sep='\t')<line_sep>ages.columns=('id' 'age')<line_sep>ages=ages[ages.age<ne>'null']<line_sep>ages.age=ages.age.astype(int)<line_sep>ages=ages[ages.age<g>0]<line_sep><return>ages<block_end>max_degree=128<line_sep>inpath='../data/pokec/'<line_sep># -- # Load data ages=load_ages(os.path.join(inpath 'soc-pokec-ages.tsv'))<line_sep>edges=pd.read_csv(os.path.join(inpath 'soc-pokec-relationships.txt') header=<none> sep='\t')<line_sep>edges.columns=('src' 'trg')<line_sep>edges=edges[edges.src.isin(ages.id)]<line_sep>edges=edges[edges.trg.isin(ages.id)]<line_sep>ages=ages[ages.id.isin(edges.src)|ages.id.isin(edges.trg)]<line_sep>ages['uid']=np.arange(ages.shape[0])<line_sep>edges=pd.merge(edges ages left_on='src' right_on='id')<line_sep>edges=edges[['uid' 'trg']]<line_sep>edges.columns=('src' 'trg')<line_sep>edges=pd.merge(edges ages left_on='trg' right_on='id')<line_sep>edges=edges[['src' 'uid']]<line_sep>edges.columns=('src' 'trg')<line_sep>ages=ages[['uid' 'age']]<line_sep>targets=np.array(ages.age).astype(float).reshape(-1 1)<line_sep>folds=np.random.choice(['train' 'val'] targets.shape[0] p=[0.5 0.5])<line_sep>G=nx.from_edgelist(np.array(edges))<line_sep># -- # Dense version adj=make_adjacency(G max_degree sel=<none>)# Adds dummy node aug_targets=np.vstack([targets np.zeros((targets.shape[1] ) dtype='float64')])<line_sep>aug_folds=np.hstack([folds ['dummy']])<line_sep>save_problem({"task":'regression_mae' "n_classes":<none> "feats":<none> "adj":adj "train_adj":adj "targets":aug_targets "folds":aug_folds } '../data/pokec/problem.h5')<line_sep>spadj=make_sparse_adjacency(G sel=<none>)<line_sep>aug_targets=np.vstack([np.zeros((targets.shape[1] ) dtype='float64') targets])<line_sep>aug_folds=np.hstack([['dummy'] folds])<line_sep>save_problem({"task":'regression_mae' "n_classes":<none> "feats":<none> "sparse":<true> "adj":spadj2edgelist(spadj) "train_adj":spadj2edgelist(spadj) "targets":aug_targets "folds":aug_folds } '../data/pokec/sparse-problem.h5')<line_sep>
# Copyright 2019 The Bazel Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Defines the emulator_toolchain rule to allow configuring emulator binaries to use."""<line_sep>EmulatorInfo=provider(doc="Information used to launch a specific version of the emulator." fields={"emulator":"A label for the emulator launcher executable at stable version." "emulator_deps":"Additional files required to launch the stable version of emulator." "emulator_head":"A label for the emulator launcher executable at head version." "emulator_head_deps":"Additional files required to launch the head version of emulator." } )<def_stmt>_emulator_toolchain_impl ctx<block_start>toolchain_info=platform_common.ToolchainInfo(info=EmulatorInfo(emulator=ctx.attr.emulator emulator_deps=ctx.attr.emulator_deps emulator_head=ctx.attr.emulator_head emulator_head_deps=ctx.attr.emulator_head_deps ) )<line_sep><return>[toolchain_info]<block_end>emulator_toolchain=rule(implementation=_emulator_toolchain_impl attrs={"emulator":attr.label(allow_files=<true> cfg="host" executable=<true> mandatory=<true> ) "emulator_deps":attr.label_list(allow_files=<true> cfg="host" ) "emulator_head":attr.label(allow_files=<true> cfg="host" executable=<true> ) "emulator_head_deps":attr.label_list(allow_files=<true> cfg="host" ) } )<line_sep>
# generated by datamodel-codegen: # filename: https://example.com/refs.yaml # timestamp: 2019-07-26T00:00:00+00:00 <import_from_future_stmt> annotations<import_from_stmt>typing Optional<import_from_stmt>pydantic AnyUrl BaseModel Field conint<class_stmt>Problem(BaseModel)<block_start>detail:Optional[str]=Field(<none> description='A human readable explanation specific to this occurrence of the\nproblem. You MUST NOT expose internal informations, personal\ndata or implementation details through this field.\n' example='Request took too long to complete.' )<line_sep>instance:Optional[AnyUrl]=Field(<none> description='An absolute URI that identifies the specific occurrence of the problem.\nIt may or may not yield further information if dereferenced.\n' )<line_sep>status:Optional[conint(ge=100 lt=600)]=Field(<none> description='The HTTP status code generated by the origin server for this occurrence\nof the problem.\n' example=503 )<line_sep>title:Optional[str]=Field(<none> description='A short, summary of the problem type. Written in english and readable\nfor engineers (usually not suited for non technical stakeholders and\nnot localized); example: Service Unavailable\n' )<line_sep>type:Optional[AnyUrl]=Field('about:blank' description='An absolute URI that identifies the problem type. When dereferenced,\nit SHOULD provide human-readable documentation for the problem type\n(e.g., using HTML).\n' example='https://tools.ietf.org/html/rfc7231#section-6.6.4' )<block_end>
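# A minimal usage sketch for the generated Problem model; the field values are arbitrary
# examples, and pydantic enforces the declared constraints (e.g. 100 <= status < 600).
if __name__ == "__main__":
    problem = Problem(
        title="Service Unavailable",
        status=503,
        detail="Request took too long to complete.",
        type="https://tools.ietf.org/html/rfc7231#section-6.6.4",
    )
    print(problem.json(exclude_none=True))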
# Copyright (c) 2020 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # <import_from_future_stmt> absolute_import<import_from_stmt>slimta.util.pycompat reprlib<line_sep>__all__=['log_repr' 'logline']<line_sep>log_repr=reprlib.Repr()<line_sep>log_repr.maxstring=100<line_sep>log_repr.maxother=100<def_stmt>logline log type typeid operation **data<block_start><if_stmt><not>data<block_start>log('{0}:{1}:{2}'.format(type typeid operation))<block_end><else_stmt><block_start>data_str=' '.join(['='.join((key log_repr.repr(val)))<for>key,val sorted(data.items())])<line_sep>log('{0}:{1}:{2} {3}'.format(type typeid operation data_str))<block_end><block_end>
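# A minimal usage sketch for logline(); the logger name and field values are arbitrary,
# and any callable taking a single string (here logging.Logger.debug) works as `log`.
if __name__ == '__main__':
    import logging
    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger('slimta.example').debug
    logline(log, 'smtp', 1234, 'connect', address='10.0.0.1', port=25)
    # Logs: "smtp:1234:connect address='10.0.0.1' port=25" (long values truncated by log_repr)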
# -*- coding: utf-8 -*- <import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>os<import_stmt>sys<import_stmt>unittest<import_stmt>shutil<line_sep># temporary solution for relative imports in case TDC is not installed # if TDC is installed, no need to use the following line sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__) '../../../')))<class_stmt>TestMolConvert(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>print(os.getcwd())<line_sep><pass><block_end><def_stmt>test_MolConvert self<block_start><import_from_stmt>tdc.chem_utils MolConvert<line_sep>converter=MolConvert(src='SMILES' dst='Graph2D')<line_sep>converter(['Clc1ccccc1C2C(=C(/N/C(=C2/C(=O)OCC)COCCN)C)\C(=O)OC' 'CCCOc1cc2ncnc(Nc3ccc4ncsc4c3)c2cc1S(=O)(=O)C(C)(C)C'])<import_from_stmt>tdc.chem_utils MolConvert<line_sep>MolConvert.eligible_format()<block_end><def_stmt>tearDown self<block_start>print(os.getcwd())<if_stmt>os.path.exists(os.path.join(os.getcwd() "data"))<block_start>shutil.rmtree(os.path.join(os.getcwd() "data"))<block_end><block_end><block_end>
"""Utilities for converting soundata Annotation classes to jams format. """<import_stmt>logging<import_stmt>os<import_from_stmt>typing Callable List<import_from_stmt>typing_extensions ParamSpecKwargs<import_stmt>jams<import_stmt>librosa<import_from_stmt>soundata annotations<def_stmt>jams_converter audio_path=<none> spectrogram_path=<none> metadata=<none> tags=<none> events=<none><block_start>"""Convert annotations from a clip to JAMS format. Args: audio_path (str or None): A path to the corresponding audio file, or None. If provided, the audio file will be read to compute the duration. If None, 'duration' must be a field in the metadata dictionary, or the resulting jam object will not validate. spectrogram_path (str or None): A path to the corresponding spectrum file, or None. tags (annotations.Tags or annotations.MultiAnnotator or None): An instance of annotations.Tags/annotations.MultiAnnotator describing the audio tags. events (annotations.Events or annotations.MultiAnnotator or None): An instance of annotations.Events/annotations.MultiAnnotator describing the sound events. Returns: jams.JAMS: A JAMS object containing the annotations. """<line_sep>jam=jams.JAMS()<line_sep># duration duration=<none><if_stmt>audio_path<is><not><none><block_start><if_stmt>os.path.exists(audio_path)<block_start>duration=librosa.get_duration(filename=audio_path)<block_end><else_stmt><block_start><raise>OSError("jams conversion failed because the audio file "+"for this clip cannot be found, and it is required "+"to compute duration.")<block_end><block_end><if_stmt>spectrogram_path<is><not><none><block_start><if_stmt>audio_path<is><none><block_start>duration=metadata["duration"]<block_end><block_end># metadata <if_stmt>metadata<is><not><none><block_start><for_stmt>key metadata<block_start><if_stmt>(key<eq>"duration"<and>duration<is><not><none><and>metadata[key]<ne>duration<and>audio_path<is><not><none>)<block_start>logging.warning("Duration provided in metadata does not"+"match the duration computed from the audio file."+"Using the duration provided by the metadata.")<block_end><if_stmt>metadata[key]<is><none><block_start><continue><block_end><if_stmt>hasattr(jam.file_metadata key)<block_start>setattr(jam.file_metadata key metadata[key])<block_end><else_stmt><block_start>setattr(jam.sandbox key metadata[key])<block_end><block_end><block_end><if_stmt>jam.file_metadata.duration<is><none><block_start>jam.file_metadata.duration=duration<block_end># soundata tags <if_stmt>tags<is><not><none><block_start><if_stmt>isinstance(tags annotations.Tags)<block_start>jam.annotations.append(tags_to_jams(tags duration=jam.file_metadata.duration))<block_end><elif_stmt>isinstance(tags annotations.MultiAnnotator)<block_start>jam.annotations.extend(multiannotator_to_jams(tags tags_to_jams))<block_end><else_stmt><block_start><raise>TypeError("tags should be of type annotations.Tags or annotations.MultiAnnotator")<block_end><block_end># soundata events <if_stmt>events<is><not><none><block_start><if_stmt>isinstance(events annotations.Events)<block_start>jam.annotations.append(events_to_jams(events))<block_end><elif_stmt>isinstance(events annotations.MultiAnnotator)<block_start>jam.annotations.extend(multiannotator_to_jams(events events_to_jams))<block_end><else_stmt><block_start><raise>TypeError("events should be of type annotations.Events or annotations.MultiAnnotator")<block_end><block_end><return>jam<block_end><def_stmt>multiannotator_to_jams multiannot:annotations.MultiAnnotator converter:Callable[<ellipsis> 
annotations.Annotation] **kwargs <arrow>List[jams.Annotation]<block_start>"""Convert tags annotations into jams format. Args: tags (annotations.MultiAnnotator): MultiAnnotator object converter (Callable[..., annotations.Annotation]): a function that takes an annotation object, its annotator, (and other optional arguments), and return a jams annotation object Returns: List[jams.Annotation]: List of jams annotation objects. """<line_sep>jams_annot=[]<for_stmt>annotator,annotation zip(multiannot.annotators multiannot.annotations)<block_start>jams_annot.append(converter(annotation annotator=annotator **kwargs))<block_end><return>jams_annot<block_end><def_stmt>tags_to_jams tags annotator=<none> duration=0 namespace="tag_open" description=<none><block_start>"""Convert tags annotations into jams format. Args: tags (annotations.Tags): tags annotation object annotator (str): annotator id namespace (str): the jams-compatible tag namespace description (str): annotation description Returns: jams.Annotation: jams annotation object. """<line_sep>ann=jams.Annotation(namespace=namespace)<line_sep>ann.annotation_metadata=jams.AnnotationMetadata(data_source="soundata" annotator={"id":annotator}<if>annotator<is><not><none><else><none> )<for_stmt>t,c zip(tags.labels tags.confidence)<block_start>ann.append(time=0.0 duration=duration value=t confidence=c)<block_end><if_stmt>description<is><not><none><block_start>ann.sandbox=jams.Sandbox(name=description)<block_end><return>ann<block_end><def_stmt>events_to_jams events annotator=<none> description=<none><block_start>"""Convert events annotations into jams format. Args: events (annotations.Events): events data object annotator (str): annotator id description (str): annotation description Returns: jams.Annotation: jams annotation object. """<line_sep>jannot_events=jams.Annotation(namespace="segment_open")<line_sep>jannot_events.annotation_metadata=jams.AnnotationMetadata(data_source="soundata" annotator={"id":annotator}<if>annotator<is><not><none><else><none> )<for_stmt>inter,label,conf zip(events.intervals events.labels events.confidence)<block_start>jannot_events.append(time=inter[0] duration=inter[1]-inter[0] value=label confidence=conf)<block_end><if_stmt>description<is><not><none><block_start>jannot_events.sandbox=jams.Sandbox(name=description)<block_end><return>jannot_events<block_end>
<import_stmt>unittest<import_from_stmt>rx empty never throw operators<as>_<import_from_stmt>rx.testing TestScheduler ReactiveTest<line_sep>on_next=ReactiveTest.on_next<line_sep>on_completed=ReactiveTest.on_completed<line_sep>on_error=ReactiveTest.on_error<line_sep>subscribe=ReactiveTest.subscribe<line_sep>subscribed=ReactiveTest.subscribed<line_sep>disposed=ReactiveTest.disposed<line_sep>created=ReactiveTest.created<class_stmt>RxException(Exception)<block_start><pass><block_end># Helper function for raising exceptions within lambdas <def_stmt>_raise ex<block_start><raise>RxException(ex)<block_end><class_stmt>TestDebounce(unittest.TestCase)<block_start><def_stmt>test_debounce_timespan_allpass self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(200 2) on_next(250 3) on_next(300 4) on_next(350 5) on_next(400 6) on_next(450 7) on_next(500 8) on_completed(550))<def_stmt>create <block_start><return>xs.pipe(_.debounce(40))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(290 3) on_next(340 4) on_next(390 5) on_next(440 6) on_next(490 7) on_next(540 8) on_completed(550)]<block_end><def_stmt>test_debounce_timespan_allpass_error_end self<block_start>ex='ex'<line_sep>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(200 2) on_next(250 3) on_next(300 4) on_next(350 5) on_next(400 6) on_next(450 7) on_next(500 8) on_error(550 ex))<def_stmt>create <block_start><return>xs.pipe(_.debounce(40))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(290 3) on_next(340 4) on_next(390 5) on_next(440 6) on_next(490 7) on_next(540 8) on_error(550 ex)]<block_end><def_stmt>test_debounce_timespan_alldrop self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(200 2) on_next(250 3) on_next(300 4) on_next(350 5) on_next(400 6) on_next(450 7) on_next(500 8) on_completed(550))<def_stmt>create <block_start><return>xs.pipe(_.debounce(60))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(550 8) on_completed(550)]<block_end><def_stmt>test_debounce_timespan_alldrop_error_end self<block_start>ex='ex'<line_sep>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(200 2) on_next(250 3) on_next(300 4) on_next(350 5) on_next(400 6) on_next(450 7) on_next(500 8) on_error(550 ex))<def_stmt>create <block_start><return>xs.pipe(_.debounce(60))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_error(550 ex)]<block_end><def_stmt>test_debounce_timespan_some_drop self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(250 2) on_next(350 3) on_next(370 4) on_next(421 5) on_next(480 6) on_next(490 7) on_next(500 8) on_completed(600))<def_stmt>create <block_start><return>xs.pipe(_.debounce(50))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(300 2) on_next(420 4) on_next(471 5) on_next(550 8) on_completed(600)]<block_end><def_stmt>test_debounce_empty self<block_start>scheduler=TestScheduler()<def_stmt>create <block_start><return>empty().pipe(_.debounce(10))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_completed(200)]<block_end><def_stmt>test_debounce_error self<block_start>ex='ex'<line_sep>scheduler=TestScheduler()<def_stmt>create 
<block_start><return>throw(ex).pipe(_.debounce(10))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_error(200 ex)]<block_end><def_stmt>test_debounce_never self<block_start>scheduler=TestScheduler()<def_stmt>create <block_start><return>never().pipe(_.debounce(10))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[]<block_end><def_stmt>test_debounce_duration_delay_behavior self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 -1) on_next(250 0) on_next(280 1) on_next(310 2) on_next(350 3) on_next(400 4) on_completed(550))<line_sep>ys=[scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99))]<def_stmt>create <block_start><def_stmt>mapper x<block_start><return>ys[x]<block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+20 0) on_next(280+20 1) on_next(310+20 2) on_next(350+20 3) on_next(400+20 4) on_completed(550)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 550)]<assert_stmt>ys[0].subscriptions<eq>[subscribe(250 250+20)]<assert_stmt>ys[1].subscriptions<eq>[subscribe(280 280+20)]<assert_stmt>ys[2].subscriptions<eq>[subscribe(310 310+20)]<assert_stmt>ys[3].subscriptions<eq>[subscribe(350 350+20)]<assert_stmt>ys[4].subscriptions<eq>[subscribe(400 400+20)]<block_end><def_stmt>test_debounce_duration_throttle_behavior self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 -1) on_next(250 0) on_next(280 1) on_next(310 2) on_next(350 3) on_next(400 4) on_completed(550))<line_sep>ys=[scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(40 42) on_next(45 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(60 42) on_next(65 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99))]<def_stmt>create <block_start><def_stmt>mapper x<block_start><return>ys[x]<block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+20 0) on_next(310+20 2) on_next(400+20 4) on_completed(550)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 550)]<assert_stmt>ys[0].subscriptions<eq>[subscribe(250 250+20)]<assert_stmt>ys[1].subscriptions<eq>[subscribe(280 310)]<assert_stmt>ys[2].subscriptions<eq>[subscribe(310 310+20)]<assert_stmt>ys[3].subscriptions<eq>[subscribe(350 400)]<assert_stmt>ys[4].subscriptions<eq>[subscribe(400 400+20)]<block_end><def_stmt>test_debounce_duration_early_completion self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 -1) on_next(250 0) on_next(280 1) on_next(310 2) on_next(350 3) on_next(400 4) on_completed(410))<line_sep>ys=[scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(40 42) on_next(45 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99)) scheduler.create_cold_observable(on_next(60 42) on_next(65 99)) scheduler.create_cold_observable(on_next(20 42) on_next(25 99))]<def_stmt>create <block_start><def_stmt>mapper 
x<block_start><return>ys[x]<block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+20 0) on_next(310+20 2) on_next(410 4) on_completed(410)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 410)]<assert_stmt>ys[0].subscriptions<eq>[subscribe(250 250+20)]<assert_stmt>ys[1].subscriptions<eq>[subscribe(280 310)]<assert_stmt>ys[2].subscriptions<eq>[subscribe(310 310+20)]<assert_stmt>ys[3].subscriptions<eq>[subscribe(350 400)]<assert_stmt>ys[4].subscriptions<eq>[subscribe(400 410)]<block_end><def_stmt>test_debounce_duration_inner_error self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(250 2) on_next(350 3) on_next(450 4) on_completed(550))<line_sep>ex='ex'<def_stmt>create <block_start><def_stmt>mapper x<block_start><if_stmt>x<l>4<block_start><return>scheduler.create_cold_observable(on_next(x<times>10 "Ignore") on_next(x<times>10+5 "Aargh!"))<block_end><else_stmt><block_start><return>scheduler.create_cold_observable(on_error(x<times>10 ex))<block_end><block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+2<times>10 2) on_next(350+3<times>10 3) on_error(450+4<times>10 ex)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 490)]<block_end><def_stmt>test_debounce_duration_outer_error self<block_start>ex='ex'<line_sep>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(250 2) on_next(350 3) on_next(450 4) on_error(460 ex))<def_stmt>create <block_start><def_stmt>mapper x<block_start><return>scheduler.create_cold_observable(on_next(x<times>10 "Ignore") on_next(x<times>10+5 "Aargh!"))<block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+2<times>10 2) on_next(350+3<times>10 3) on_error(460 ex)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 460)]<block_end><def_stmt>test_debounce_duration_mapper_throws self<block_start>ex='ex'<line_sep>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(250 2) on_next(350 3) on_next(450 4) on_completed(550))<def_stmt>create <block_start><def_stmt>mapper x<block_start><if_stmt>x<l>4<block_start><return>scheduler.create_cold_observable(on_next(x<times>10 "Ignore") on_next(x<times>10+5 "Aargh!"))<block_end><else_stmt><block_start>_raise(ex)<block_end><block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+2<times>10 2) on_next(350+3<times>10 3) on_error(450 ex)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 450)]<block_end><def_stmt>test_debounce_duration_inner_done_delay_behavior self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(250 2) on_next(350 3) on_next(450 4) on_completed(550))<def_stmt>create <block_start><def_stmt>mapper x<block_start><return>scheduler.create_cold_observable(on_completed(x<times>10))<block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+2<times>10 2) on_next(350+3<times>10 3) on_next(450+4<times>10 4) on_completed(550)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 550)]<block_end><def_stmt>test_debounce_duration_inner_done_throttle_behavior 
self<block_start>scheduler=TestScheduler()<line_sep>xs=scheduler.create_hot_observable(on_next(150 1) on_next(250 2) on_next(280 3) on_next(300 4) on_next(400 5) on_next(410 6) on_completed(550))<def_stmt>create <block_start><def_stmt>mapper x<block_start><return>scheduler.create_cold_observable(on_completed(x<times>10))<block_end><return>xs.pipe(_.throttle_with_mapper(mapper))<block_end>results=scheduler.start(create)<assert_stmt>results.messages<eq>[on_next(250+2<times>10 2) on_next(300+4<times>10 4) on_next(410+6<times>10 6) on_completed(550)]<assert_stmt>xs.subscriptions<eq>[subscribe(200 550)]<block_end><block_end>
<import_from_stmt>curlylint ast<import_from_stmt>curlylint.check_node CheckNode build_tree<import_from_stmt>curlylint.issue Issue<line_sep>META_VIEWPORT="meta_viewport"<line_sep>RULE={"id":"meta_viewport" "type":"accessibility" "docs":{"description":"The `viewport` meta tag should not use `user-scalable=no`, and `maximum-scale` should be 2 or above, so end users can zoom" "url":"https://www.curlylint.org/docs/rules/meta_viewport" "impact":"Critical" "tags":["cat.language" "wcag2aa" "wcag144"] "resources":["[Understanding WCAG SC 1.4.4 Resize Text](http://www.w3.org/TR/UNDERSTANDING-WCAG20/visual-audio-contrast-scale.html)" "[axe-core, meta-viewport](https://dequeuniversity.com/rules/axe/3.5/meta-viewport)" ] } "schema":{"$schema":"http://json-schema.org/draft/2019-09/schema#" "oneOf":[{"const":<true> "title":"`user-scalable=no` must not be used, and `maximum-scale` should be 2 or above." "examples":[<true>] } ] } }<def_stmt>find_valid node file<block_start>name=getattr(node.value "name" <none>)<line_sep>is_meta=(isinstance(node.value ast.Element)<and>name<and>name.lower()<eq>"meta")<if_stmt>is_meta<block_start>attributes=[]<if_stmt>getattr(node.value "opening_tag" <none>)<block_start>attributes={}<for_stmt>n node.value.opening_tag.attributes.nodes<block_start>attributes[str(n.name)]=str(n.value).strip("\"'")<block_end><block_end><if_stmt>"name"<in>attributes<and>attributes["name"]<eq>"viewport"<block_start><if_stmt>"user-scalable=no"<in>attributes["content"]<block_start><return>[Issue.from_node(file node "Remove `user-scalable=no` from the viewport meta so users can zoom" "meta_viewport" )]<block_end><if_stmt>("maximum-scale=1"<in>attributes["content"]<or>"maximum-scale=0"<in>attributes["content"])<block_start><return>[Issue.from_node(file node "`maximum-scale` should not be less than 2" "meta_viewport" )]<block_end><block_end><block_end><if_stmt><not>node.children<block_start><return>[]<block_end><return>sum((find_valid(child file)<for>child node.children) [])<block_end><def_stmt>meta_viewport file config<block_start>root=CheckNode(<none>)<line_sep>build_tree(root file.tree)<line_sep>src=file.source.lower()<if_stmt>r"user-scalable"<in>src<or>r"maximum-scale"<in>src<block_start><return>find_valid(root file)<block_end><return>[]<block_end>
# --- # jupyter: # jupytext: # formats: ipynb,py:light # text_representation: # extension: .py # format_name: light # format_version: '1.5' # jupytext_version: 1.3.3 # kernelspec: # display_name: Python 3 # language: python # name: python3 # --- # # Materials # # This test showcases rendering with various materials provided by Lightmetrica. We render the images using ``renderer::pt``. # %load_ext autoreload # %autoreload 2 <import_stmt>lmenv<line_sep>env=lmenv.load('.lmenv')<import_stmt>os<import_stmt>pickle<import_stmt>json<import_stmt>numpy<as>np<import_stmt>matplotlib.pyplot<as>plt<import_stmt>lightmetrica<as>lm<line_sep># %load_ext lightmetrica_jupyter <import_stmt>lmscene<line_sep>lm.init()<line_sep>lm.log.init('jupyter')<line_sep>lm.progress.init('jupyter')<line_sep>lm.info()<line_sep>lm.comp.load_plugin(os.path.join(env.bin_path 'accel_embree'))<if_stmt><not>lm.Release<block_start>lm.parallel.init('openmp' num_threads=1)<line_sep>lm.debug.attach_to_debugger()<block_end># + <def_stmt>render scene name **kwargs<block_start>w=854<line_sep>h=480<line_sep>film=lm.load_film('film' 'bitmap' w=w h=h)<line_sep>renderer=lm.load_renderer('renderer' name scene=scene output=film max_verts=20 scheduler='time' render_time=30 **kwargs)<line_sep>renderer.render()<line_sep><return>np.copy(film.buffer())<block_end><def_stmt>display_image img fig_size=15 scale=1<block_start>f=plt.figure(figsize=(fig_size fig_size))<line_sep>ax=f.add_subplot(111)<line_sep>ax.imshow(np.clip(np.power(img<times>scale 1/2.2) 0 1) origin='lower')<line_sep>ax.axis('off')<line_sep>plt.show()<block_end># - # ## Scene setup # Create scene accel=lm.load_accel('accel' 'embree')<line_sep>scene=lm.load_scene('scene' 'default' accel=accel)<line_sep>mat=lm.load_material('mat_ut' 'diffuse' Kd=[1 1 1])<line_sep>lmscene.bunny_with_area_light(scene env.scene_path mat_knob=mat)<line_sep>scene.build()<line_sep># ## Rendering # ### Diffse material # # `material::diffuse` lm.load_material('mat_ut' 'diffuse' Kd=[.8 .2 .2])<line_sep>img=render(scene 'pt')<line_sep>display_image(img)<line_sep># ### Glossy material # # `material::glossy` lm.load_material('mat_ut' 'glossy' Ks=[.8 .2 .2] ax=0.2 ay=0.2)<line_sep>img=render(scene 'pt')<line_sep>display_image(img)<line_sep># ### Perfect specular reflection # # `material::mirror` lm.load_material('mat_ut' 'mirror')<line_sep>img=render(scene 'pt')<line_sep>display_image(img)<line_sep># ### Fresnel reflection / refraction # # `material::fresnel` lm.load_material('mat_ut' 'glass' Ni=1.5)<line_sep>img=render(scene 'pt')<line_sep>display_image(img)<line_sep># ### Mixture material with constant weights using RR # # `material::constant_weight_mixture_rr` mat_diffuse=lm.load_material('mat_diffuse' 'diffuse' Kd=[.1 .8 .1])<line_sep>mat_glossy=lm.load_material('mat_glossy' 'glossy' Ks=[.8 .1 .1] ax=0.2 ay=0.2)<line_sep>mat_mirror=lm.load_material('mat_mirror' 'mirror')<line_sep>mat=lm.load_material('mat_ut' 'constant_weight_mixture_rr' [{'material':mat_diffuse.loc() 'weight':0.2} {'material':mat_glossy.loc() 'weight':0.4} {'material':mat_mirror.loc() 'weight':0.4}])<line_sep>img=render(scene 'pt')<line_sep>display_image(img)<line_sep># ### Mixture material with constant weights using marginalization # # `material::constant_weight_mixture_marginalized` mat=lm.load_material('mat_ut' 'constant_weight_mixture_marginalized' [{'material':mat_diffuse.loc() 'weight':0.2} {'material':mat_glossy.loc() 'weight':0.4} {'material':mat_mirror.loc() 'weight':0.4}])<line_sep>img=render(scene 
'pt')<line_sep>display_image(img)<line_sep># ### Mixture material with alpha texture # # `material::mixture_wavefrontobj` # # This material is the default material converted from MTL format of Wavefront OBJ. tex=lm.load_texture('tex' 'bitmap' path=os.path.join(env.scene_path 'fireplace_room' 'textures' 'leaf.png'))<line_sep>lm.load_material('mat_ut' 'mixture_wavefrontobj' Kd=[.8 .8 .8] mapKd=tex Ks=[0 0 0] ax=0.2 ay=0.2 no_alpha_mask=<false>)<line_sep>img=render(scene 'pt')<line_sep>display_image(img)<line_sep>
# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # (see spyder/__init__.py for details) """Language Server Protocol message boxes."""<line_sep># Standard library imports <import_stmt>os<line_sep># Third party imports <import_from_stmt>qtpy.QtCore Signal<import_from_stmt>qtpy.QtWidgets QMessageBox<line_sep># Local imports <import_from_stmt>spyder.config.base _<import_from_stmt>spyder.widgets.helperwidgets MessageCheckBox<class_stmt>ServerDisabledMessageBox(MessageCheckBox)<block_start>sig_restart_spyder=Signal()<def_stmt>__init__ self parent warn_str set_conf<block_start>super().__init__(icon=QMessageBox.Warning parent=parent)<line_sep>self.set_conf=set_conf<line_sep>self.setWindowTitle(_("Warning"))<line_sep>self.set_checkbox_text(_("Don't show again"))<line_sep>self.setStandardButtons(QMessageBox.Yes|QMessageBox.No)<line_sep>self.setDefaultButton(QMessageBox.No)<line_sep>self.set_checked(<false>)<line_sep>self.set_check_visible(<true>)<line_sep>self.setText(warn_str)<block_end><def_stmt>exec_ self<block_start>answer=super().exec_()<line_sep>self.set_conf('show_lsp_down_warning' <not>self.is_checked())<if_stmt>answer<eq>QMessageBox.Yes<block_start>self.sig_restart_spyder.emit()<block_end><block_end>@classmethod<def_stmt>instance cls warn_str set_conf<block_start><def_stmt>wrapper parent<block_start><return>cls(parent warn_str set_conf)<block_end><return>wrapper<block_end><block_end>
<import_stmt>esphome.codegen<as>cg<import_stmt>esphome.config_validation<as>cv<import_from_stmt>esphome automation<import_from_stmt>esphome.automation Condition maybe_simple_id<import_from_stmt>esphome.components mqtt<import_from_stmt>esphome.const CONF_ID CONF_ON_LOCK CONF_ON_UNLOCK CONF_TRIGGER_ID CONF_MQTT_ID <import_from_stmt>esphome.core CORE coroutine_with_priority<import_from_stmt>esphome.cpp_helpers setup_entity<line_sep>CODEOWNERS=["@esphome/core"]<line_sep>IS_PLATFORM_COMPONENT=<true><line_sep>lock_ns=cg.esphome_ns.namespace("lock")<line_sep>Lock=lock_ns.class_("Lock" cg.EntityBase)<line_sep>LockPtr=Lock.operator("ptr")<line_sep>LockCall=lock_ns.class_("LockCall")<line_sep>UnlockAction=lock_ns.class_("UnlockAction" automation.Action)<line_sep>LockAction=lock_ns.class_("LockAction" automation.Action)<line_sep>OpenAction=lock_ns.class_("OpenAction" automation.Action)<line_sep>LockPublishAction=lock_ns.class_("LockPublishAction" automation.Action)<line_sep>LockCondition=lock_ns.class_("LockCondition" Condition)<line_sep>LockLockTrigger=lock_ns.class_("LockLockTrigger" automation.Trigger.template())<line_sep>LockUnlockTrigger=lock_ns.class_("LockUnlockTrigger" automation.Trigger.template())<line_sep>LOCK_SCHEMA=cv.ENTITY_BASE_SCHEMA.extend(cv.MQTT_COMMAND_COMPONENT_SCHEMA).extend({cv.OnlyWith(CONF_MQTT_ID "mqtt"):cv.declare_id(mqtt.MQTTLockComponent) cv.Optional(CONF_ON_LOCK):automation.validate_automation({cv.GenerateID(CONF_TRIGGER_ID):cv.declare_id(LockLockTrigger) }) cv.Optional(CONF_ON_UNLOCK):automation.validate_automation({cv.GenerateID(CONF_TRIGGER_ID):cv.declare_id(LockUnlockTrigger) }) })<async_keyword><def_stmt>setup_lock_core_ var config<block_start><await>setup_entity(var config)<for_stmt>conf config.get(CONF_ON_LOCK [])<block_start>trigger=cg.new_Pvariable(conf[CONF_TRIGGER_ID] var)<line_sep><await>automation.build_automation(trigger [] conf)<block_end><for_stmt>conf config.get(CONF_ON_UNLOCK [])<block_start>trigger=cg.new_Pvariable(conf[CONF_TRIGGER_ID] var)<line_sep><await>automation.build_automation(trigger [] conf)<block_end><if_stmt>CONF_MQTT_ID<in>config<block_start>mqtt_=cg.new_Pvariable(config[CONF_MQTT_ID] var)<line_sep><await>mqtt.register_mqtt_component(mqtt_ config)<block_end><block_end><async_keyword><def_stmt>register_lock var config<block_start><if_stmt><not>CORE.has_id(config[CONF_ID])<block_start>var=cg.Pvariable(config[CONF_ID] var)<block_end>cg.add(cg.App.register_lock(var))<line_sep><await>setup_lock_core_(var config)<block_end>LOCK_ACTION_SCHEMA=maybe_simple_id({cv.Required(CONF_ID):cv.use_id(Lock) })<line_sep>@automation.register_action("lock.unlock" UnlockAction LOCK_ACTION_SCHEMA)@automation.register_action("lock.lock" LockAction LOCK_ACTION_SCHEMA)@automation.register_action("lock.open" OpenAction LOCK_ACTION_SCHEMA)<async_keyword><def_stmt>lock_action_to_code config action_id template_arg args<block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep><return>cg.new_Pvariable(action_id template_arg paren)<block_end>@automation.register_condition("lock.is_locked" LockCondition LOCK_ACTION_SCHEMA)<async_keyword><def_stmt>lock_is_on_to_code config condition_id template_arg args<block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep><return>cg.new_Pvariable(condition_id template_arg paren <true>)<block_end>@automation.register_condition("lock.is_unlocked" LockCondition LOCK_ACTION_SCHEMA)<async_keyword><def_stmt>lock_is_off_to_code config condition_id template_arg 
args<block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep><return>cg.new_Pvariable(condition_id template_arg paren <false>)<block_end>@coroutine_with_priority(100.0)<async_keyword><def_stmt>to_code config<block_start>cg.add_global(lock_ns.using)<line_sep>cg.add_define("USE_LOCK")<block_end>
<import_stmt>asyncio<import_from_stmt>collections namedtuple<line_sep>__all__=['MemcachePool']<line_sep>_connection=namedtuple('connection' ['reader' 'writer'])<class_stmt>MemcachePool<block_start><def_stmt>__init__ self host port * minsize maxsize loop=<none><block_start>loop=loop<if>loop<is><not><none><else>asyncio.get_event_loop()<line_sep>self._host=host<line_sep>self._port=port<line_sep>self._minsize=minsize<line_sep>self._maxsize=maxsize<line_sep>self._loop=loop<line_sep>self._pool=asyncio.Queue(loop=loop)<line_sep>self._in_use=set()<block_end>@asyncio.coroutine<def_stmt>clear self<block_start>"""Clear pool connections."""<while_stmt><not>self._pool.empty()<block_start>conn=<yield><from>self._pool.get()<line_sep>self._do_close(conn)<block_end><block_end><def_stmt>_do_close self conn<block_start>conn.reader.feed_eof()<line_sep>conn.writer.close()<block_end>@asyncio.coroutine<def_stmt>acquire self<block_start>"""Acquire connection from the pool, or spawn new one if pool maxsize permits. :return: ``tuple`` (reader, writer) """<while_stmt>self.size()<eq>0<or>self.size()<l>self._minsize<block_start>_conn=<yield><from>self._create_new_conn()<if_stmt>_conn<is><none><block_start><break><block_end>self._pool.put_nowait(_conn)<block_end>conn=<none><while_stmt><not>conn<block_start>_conn=<yield><from>self._pool.get()<if_stmt>_conn.reader.at_eof()<or>_conn.reader.exception()<block_start>self._do_close(_conn)<line_sep>conn=<yield><from>self._create_new_conn()<block_end><else_stmt><block_start>conn=_conn<block_end><block_end>self._in_use.add(conn)<line_sep><return>conn<block_end><def_stmt>release self conn<block_start>"""Releases connection back to the pool. :param conn: ``namedtuple`` (reader, writer) """<line_sep>self._in_use.remove(conn)<if_stmt>conn.reader.at_eof()<or>conn.reader.exception()<block_start>self._do_close(conn)<block_end><else_stmt><block_start>self._pool.put_nowait(conn)<block_end><block_end>@asyncio.coroutine<def_stmt>_create_new_conn self<block_start><if_stmt>self.size()<l>self._maxsize<block_start>reader,writer=<yield><from>asyncio.open_connection(self._host self._port loop=self._loop)<if_stmt>self.size()<l>self._maxsize<block_start><return>_connection(reader writer)<block_end><else_stmt><block_start>reader.feed_eof()<line_sep>writer.close()<line_sep><return><none><block_end><block_end><else_stmt><block_start><return><none><block_end><block_end><def_stmt>size self<block_start><return>self._pool.qsize()+len(self._in_use)<block_end><block_end>
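# --- Hedged usage sketch (editor addition, not part of the original module). ---
# Assumes a memcached server listening on localhost:11211; the "version" command
# is only a cheap request/response round trip to illustrate acquire()/release().
# Written in the same pre-async/await coroutine style as the pool above.
import asyncio

@asyncio.coroutine
def show_version(pool):
    conn = yield from pool.acquire()          # namedtuple (reader, writer)
    try:
        conn.writer.write(b'version\r\n')     # memcached text-protocol command
        line = yield from conn.reader.readline()
    finally:
        pool.release(conn)                    # always hand the connection back
    return line

if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    pool = MemcachePool('localhost', 11211, minsize=1, maxsize=4, loop=loop)
    print(loop.run_until_complete(show_version(pool)))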
<import_stmt>os<import_stmt>pytest<import_stmt>py<line_sep>ufoLib2=pytest.importorskip("ufoLib2")<import_from_stmt>fontTools.cu2qu.ufo CURVE_TYPE_LIB_KEY<import_from_stmt>fontTools.cu2qu.cli main<line_sep>DATADIR=os.path.join(os.path.dirname(__file__) 'data')<line_sep>TEST_UFOS=[py.path.local(DATADIR).join("RobotoSubset-Regular.ufo") py.path.local(DATADIR).join("RobotoSubset-Bold.ufo") ]<line_sep>@pytest.fixture<def_stmt>test_paths tmpdir<block_start>result=[]<for_stmt>path TEST_UFOS<block_start>new_path=tmpdir/path.basename<line_sep>path.copy(new_path)<line_sep>result.append(new_path)<block_end><return>result<block_end><class_stmt>MainTest(object)<block_start>@staticmethod<def_stmt>run_main *args<block_start>main([str(p)<for>p args<if>p])<block_end><def_stmt>test_single_input_no_output self test_paths<block_start>ufo_path=test_paths[0]<line_sep>self.run_main(ufo_path)<line_sep>font=ufoLib2.Font.open(ufo_path)<assert_stmt>font.lib[CURVE_TYPE_LIB_KEY]<eq>"quadratic"<block_end><def_stmt>test_single_input_output_file self tmpdir<block_start>input_path=TEST_UFOS[0]<line_sep>output_path=tmpdir/input_path.basename<line_sep>self.run_main('-o' output_path input_path)<assert_stmt>output_path.check(dir=1)<block_end><def_stmt>test_multiple_inputs_output_dir self tmpdir<block_start>output_dir=tmpdir/"output_dir"<line_sep>self.run_main('-d' output_dir *TEST_UFOS)<assert_stmt>output_dir.check(dir=1)<line_sep>outputs=set(p.basename<for>p output_dir.listdir())<assert_stmt>"RobotoSubset-Regular.ufo"<in>outputs<assert_stmt>"RobotoSubset-Bold.ufo"<in>outputs<block_end><def_stmt>test_interpolatable_inplace self test_paths<block_start>self.run_main('-i' *test_paths)<line_sep>self.run_main('-i' *test_paths)<block_end># idempotent @pytest.mark.parametrize("mode" ["" "-i"] ids=["normal" "interpolatable"])<def_stmt>test_copytree self mode tmpdir<block_start>output_dir=tmpdir/"output_dir"<line_sep>self.run_main(mode '-d' output_dir *TEST_UFOS)<line_sep>output_dir_2=tmpdir/"output_dir_2"<line_sep># no conversion when curves are already quadratic, just copy self.run_main(mode '-d' output_dir_2 *output_dir.listdir())<line_sep># running again overwrites existing with the copy self.run_main(mode '-d' output_dir_2 *output_dir.listdir())<block_end><def_stmt>test_multiprocessing self tmpdir test_paths<block_start>self.run_main(*(test_paths+["-j"]))<block_end><def_stmt>test_keep_direction self test_paths<block_start>self.run_main('--keep-direction' *test_paths)<block_end><def_stmt>test_conversion_error self test_paths<block_start>self.run_main('--conversion-error' 0.002 *test_paths)<block_end><def_stmt>test_conversion_error_short self test_paths<block_start>self.run_main('-e' 0.003 test_paths[0])<block_end><block_end>
<import_from_stmt>.one_hot one_hot<import_from_stmt>.tensor_type TensorType<import_from_stmt>.list_recursive_subclasses list_recursive_concrete_subclasses<import_from_stmt>.parse parse_loss parse_activation parse_metric parse_optimizer<import_from_stmt>.average_meter AverageMeter<import_from_stmt>.timer Timer<import_from_stmt>.early_stopping EarlyStopping<import_from_stmt>.get_file get_file _hash_file<line_sep>
# -*- coding: utf-8 -*- """ transistor.workers.workgroup ~~~~~~~~~~~~ This module implements WorkGroup. See transistor.workers.__init__ for more notes on this module. :copyright: Copyright (C) 2018 by BOM Quote Limited :license: The MIT License, see LICENSE for more details. ~~~~~~~~~~~~ """<import_from_stmt>typing NamedTuple Type Union List Optional<import_from_stmt>transistor.workers.basegroup BaseGroup<import_from_stmt>transistor.persistence.loader ItemLoader<import_from_stmt>transistor.persistence.exporters.base BaseItemExporter<import_from_stmt>transistor.persistence.containers Item<import_from_stmt>transistor.scrapers.splash_scraper_abc SplashScraper<import_from_stmt>transistor.workers.baseworker BaseWorker<import_from_stmt>transistor.schedulers.books.bookstate StatefulBook<class_stmt>WorkGroup(NamedTuple)<block_start>""" A container class to use when starting up a WorkGroupManager. Intended use is, like below: >>> groups = [ >>> WorkGroup(class_=MouseKeyGroup, workers=2, name='mousekey.cn', >>> kwargs={'china':True, 'timeout': (3.0, 3.0)}), >>> >>> WorkGroup(class_=MouseKeyGroup, workers=2, name='mousekey.com', >>> kwargs={'timeout':(3.0, 3.0)}) >>> ] >>> manager = WorkGroupManager('part_number_job_1', book, groups=groups, pool=5) :param name: name the group :param url: the starting url for the group of Workers :param spider: the custom defined Spider, i.e. subclass of SplashScraper :param worker: the BaseWorker class or else a subclass of it :param group: the <WorkerGroup> class object :param items: a subclass of SplashItems, or some subclass of Item :param loader: the ItemLoader class or else a subclass of it :param exporter: the BaseItemExporter class or else a subclass of it :param kwargs: to use for each <Worker> instance in the group """<line_sep>name:str<line_sep>url:str<line_sep># tasks: Optional[Type[Union[Type[StatefulBook], dict]]] spider:Type[SplashScraper]<line_sep>worker:Type[BaseWorker]=BaseWorker<line_sep>group:Type[BaseGroup]=BaseGroup<line_sep>items:Type[Item]=Item<line_sep>loader:Type[ItemLoader]=ItemLoader<line_sep>exporters:List[Type[Union[Type[BaseItemExporter]]]]=BaseItemExporter<line_sep>workers:int=1<line_sep>kwargs:dict={}<block_end>
<import_from_stmt>pathlib Path<line_sep>CURRENT_DIR=Path(__file__).parent.absolute()<line_sep>DATA_DIR=CURRENT_DIR/'data/'<line_sep>
<import_from_stmt>configs cfg<import_from_stmt>src.utils.record_log _logger<import_stmt>tensorflow<as>tf<import_stmt>numpy<as>np<import_from_stmt>abc ABCMeta abstractmethod<class_stmt>ModelTemplate(metaclass=ABCMeta)<block_start><def_stmt>__init__ self token_emb_mat glove_emb_mat tds cds tl scope<block_start>self.scope=scope<line_sep>self.global_step=tf.get_variable('global_step' shape=[] dtype=tf.int32 initializer=tf.constant_initializer(0) trainable=<false>)<line_sep>self.token_emb_mat,self.glove_emb_mat=token_emb_mat glove_emb_mat<line_sep># ---- place holder ----- self.token_seq=tf.placeholder(tf.int32 [<none> <none>] name='token_seq')<line_sep>self.char_seq=tf.placeholder(tf.int32 [<none> <none> tl] name='context_char')<line_sep>self.op_list=tf.placeholder(tf.int32 [<none> <none>] name='op_lists')# bs,sol self.reduce_mat=tf.placeholder(tf.int32 [<none> <none> <none>] name='reduce_mats')# [bs,sol,mc] self.sentiment_label=tf.placeholder(tf.int32 [<none>] name='sentiment_label')# bs self.is_train=tf.placeholder(tf.bool [] name='is_train')<line_sep># ----------- parameters ------------- self.tds,self.cds=tds cds<line_sep>self.tl=tl<line_sep>self.tel=cfg.word_embedding_length<line_sep>self.cel=cfg.char_embedding_length<line_sep>self.cos=cfg.char_out_size<line_sep>self.ocd=list(map(int cfg.out_channel_dims.split(',')))<line_sep>self.fh=list(map(int cfg.filter_heights.split(',')))<line_sep>self.hn=cfg.hidden_units_num<line_sep>self.finetune_emb=cfg.fine_tune<line_sep>self.output_class=5<if>cfg.fine_grained<else>2<line_sep>self.bs=tf.shape(self.token_seq)[0]<line_sep>self.sl=tf.shape(self.token_seq)[1]<line_sep>self.ol=tf.shape(self.op_list)[1]<line_sep>self.mc=tf.shape(self.reduce_mat)[2]<line_sep># ------------ other --------- self.token_mask=tf.cast(self.token_seq tf.bool)<line_sep>self.char_mask=tf.cast(self.char_seq tf.bool)<line_sep>self.token_len=tf.reduce_sum(tf.cast(self.token_mask tf.int32) -1)<line_sep>self.char_len=tf.reduce_sum(tf.cast(self.char_mask tf.int32) -1)<line_sep>self.stack_mask=tf.not_equal(self.op_list tf.zeros_like(self.op_list))<line_sep>self.tensor_dict={}<line_sep># ------ start ------ self.logits=<none><line_sep>self.loss=<none><line_sep>self.accuracy=<none><line_sep>self.var_ema=<none><line_sep>self.ema=<none><line_sep>self.summary=<none><line_sep>self.opt=<none><line_sep>self.train_op=<none><block_end>@abstractmethod<def_stmt>build_network self<block_start><pass><block_end><def_stmt>build_loss self# weight_decay <block_start><with_stmt>tf.name_scope("weight_decay")<block_start><for_stmt>var set(tf.get_collection('reg_vars' self.scope))<block_start>weight_decay=tf.multiply(tf.nn.l2_loss(var) cfg.wd name="{}-wd".format('-'.join(str(var.op.name).split('/'))))<line_sep>tf.add_to_collection('losses' weight_decay)<block_end><block_end>reg_vars=tf.get_collection('losses' self.scope)<line_sep>trainable_vars=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES self.scope)<line_sep>_logger.add('regularization var num: %d'%len(reg_vars))<line_sep>_logger.add('trainable var num: %d'%len(trainable_vars))<line_sep>losses=tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.sentiment_label logits=self.logits)<line_sep>tf.add_to_collection('losses' tf.reduce_mean(losses name='xentropy_loss_mean'))<line_sep>loss=tf.add_n(tf.get_collection('losses' self.scope) name='loss')<line_sep>tf.summary.scalar(loss.op.name loss)<line_sep>tf.add_to_collection('ema/scalar' loss)<line_sep><return>loss<block_end><def_stmt>build_accuracy 
self<block_start>correct=tf.equal(tf.cast(tf.argmax(self.logits -1) tf.int32) self.sentiment_label)<line_sep># [bs] <return>tf.cast(correct tf.float32)<block_end><def_stmt>update_tensor_add_ema_and_opt self<block_start>self.logits=self.build_network()<line_sep>self.loss=self.build_loss()<line_sep>self.accuracy=self.build_accuracy()<line_sep># ------------ema------------- <if_stmt><true><block_start>self.var_ema=tf.train.ExponentialMovingAverage(cfg.var_decay)<line_sep>self.build_var_ema()<block_end><if_stmt>cfg.mode<eq>'train'<block_start>self.ema=tf.train.ExponentialMovingAverage(cfg.decay)<line_sep>self.build_ema()<block_end>self.summary=tf.summary.merge_all()<line_sep># ---------- optimization --------- <if_stmt>cfg.optimizer.lower()<eq>'adadelta'<block_start><assert_stmt>cfg.learning_rate<g>0.1<and>cfg.learning_rate<l>1.<line_sep>self.opt=tf.train.AdadeltaOptimizer(cfg.learning_rate)<block_end><elif_stmt>cfg.optimizer.lower()<eq>'adam'<block_start><assert_stmt>cfg.learning_rate<l>0.1<line_sep>self.opt=tf.train.AdamOptimizer(cfg.learning_rate)<block_end><elif_stmt>cfg.optimizer.lower()<eq>'rmsprop'<block_start><assert_stmt>cfg.learning_rate<l>0.1<line_sep>self.opt=tf.train.RMSPropOptimizer(cfg.learning_rate)<block_end><else_stmt><block_start><raise>AttributeError('no optimizer named as \'%s\''%cfg.optimizer)<block_end>self.train_op=self.opt.minimize(self.loss self.global_step var_list=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES self.scope))<block_end><def_stmt>build_var_ema self<block_start>ema_op=self.var_ema.apply(tf.trainable_variables() )<with_stmt>tf.control_dependencies([ema_op])<block_start>self.loss=tf.identity(self.loss)<block_end><block_end><def_stmt>build_ema self<block_start>tensors=tf.get_collection("ema/scalar" scope=self.scope)+tf.get_collection("ema/vector" scope=self.scope)<line_sep>ema_op=self.ema.apply(tensors)<for_stmt>var tf.get_collection("ema/scalar" scope=self.scope)<block_start>ema_var=self.ema.average(var)<line_sep>tf.summary.scalar(ema_var.op.name ema_var)<block_end><for_stmt>var tf.get_collection("ema/vector" scope=self.scope)<block_start>ema_var=self.ema.average(var)<line_sep>tf.summary.histogram(ema_var.op.name ema_var)<block_end><with_stmt>tf.control_dependencies([ema_op])<block_start>self.loss=tf.identity(self.loss)<block_end><block_end><def_stmt>get_feed_dict self sample_batch data_type='train'# max lens <block_start>sl,ol,mc=0 0 0<for_stmt>sample sample_batch<block_start>sl=max(sl len(sample['root_node']['token_seq']))<line_sep>ol=max(ol len(sample['shift_reduce_info']['op_list']))<for_stmt>reduce_list sample['shift_reduce_info']['reduce_mat']<block_start>mc=max(mc len(reduce_list))<block_end><block_end><assert_stmt>mc<eq>0<or>mc<eq>2 mc<line_sep># token and char token_seq_b=[]<line_sep>char_seq_b=[]<for_stmt>sample sample_batch<block_start>token_seq=np.zeros([sl] cfg.intX)<line_sep>char_seq=np.zeros([sl self.tl] cfg.intX)<for_stmt>idx_t,(token char_seq_v) enumerate(zip(sample['root_node']['token_seq_digital'] sample['root_node']['char_seq_digital']))<block_start>token_seq[idx_t]=token<for_stmt>idx_c,char enumerate(char_seq_v)<block_start><if_stmt>idx_c<ge>self.tl<block_start><break><block_end>char_seq[idx_t idx_c]=char<block_end><block_end>token_seq_b.append(token_seq)<line_sep>char_seq_b.append(char_seq)<block_end>token_seq_b=np.stack(token_seq_b)<line_sep>char_seq_b=np.stack(char_seq_b)<line_sep># tree op_list_b=[]<line_sep>reduce_mat_b=[]<for_stmt>sample sample_batch<block_start>op_list=np.zeros([ol] cfg.intX)<line_sep>reduce_mat=np.zeros([ol 
mc] cfg.intX)<for_stmt>idx_o,(op reduce_list) enumerate(zip(sample['shift_reduce_info']['op_list'] sample['shift_reduce_info']['reduce_mat']))<block_start>op_list[idx_o]=op<for_stmt>idx_m,red enumerate(reduce_list)<block_start>reduce_mat[idx_o idx_m]=red<block_end><block_end>op_list_b.append(op_list)<line_sep>reduce_mat_b.append(reduce_mat)<block_end>op_list_b=np.stack(op_list_b)<line_sep>reduce_mat_b=np.stack(reduce_mat_b)<line_sep># label sentiment_label_b=[]<for_stmt>sample sample_batch<block_start>sentiment_float=sample['root_node']['sentiment_label']<line_sep>sentiment_int=cfg.sentiment_float_to_int(sentiment_float)<line_sep>sentiment_label_b.append(sentiment_int)<block_end>sentiment_label_b=np.stack(sentiment_label_b).astype(cfg.intX)<line_sep>feed_dict={self.token_seq:token_seq_b self.char_seq:char_seq_b self.op_list:op_list_b self.reduce_mat:reduce_mat_b self.sentiment_label:sentiment_label_b self.is_train:<true><if>data_type<eq>'train'<else><false>}<line_sep><return>feed_dict<block_end><def_stmt>step self sess batch_samples get_summary=<false><block_start><assert_stmt>isinstance(sess tf.Session)<line_sep>feed_dict=self.get_feed_dict(batch_samples 'train')<line_sep>cfg.time_counter.add_start()<if_stmt>get_summary<block_start>loss,summary,train_op=sess.run([self.loss self.summary self.train_op] feed_dict=feed_dict)<block_end><else_stmt><block_start>loss,train_op=sess.run([self.loss self.train_op] feed_dict=feed_dict)<line_sep>summary=<none><block_end>cfg.time_counter.add_stop()<line_sep><return>loss summary train_op<block_end><block_end>
# Copyright (c) OpenMMLab. All rights reserved. <import_from_stmt>.eval_map eval_rbbox_map<line_sep>__all__=['eval_rbbox_map']<line_sep>
#python OrderedDict <import_stmt>collections<class_stmt>LRUCache<block_start><def_stmt>__init__ self capacity:int<block_start>self.cache=collections.OrderedDict()<line_sep>self.capacity=capacity<block_end><def_stmt>get self key:int<arrow>int<block_start><if_stmt>key<not><in>self.cache<block_start><return>-1<block_end>val=self.cache.pop(key)<line_sep>self.cache[key]=val<line_sep><return>val<block_end><def_stmt>put self key:int value:int<arrow><none><block_start><if_stmt>key<in>self.cache<block_start><del_stmt>self.cache[key]<block_end><elif_stmt>len(self.cache)<eq>self.capacity<block_start>self.cache.popitem(last=<false>)<block_end>self.cache[key]=value<block_end><block_end><class_stmt>LRUCache<block_start><def_stmt>__init__ self MSize<block_start>self.size=MSize<line_sep>self.cache={}<line_sep>self.next,self.before={} {}<line_sep>self.head,self.tail='#' '$'<line_sep>self.connect(self.head self.tail)<block_end><def_stmt>connect self a b<block_start>self.next[a],self.before[b]=b a<block_end><def_stmt>delete self key<block_start>self.connect(self.before[key] self.next[key])<del_stmt>self.before[key] self.next[key] self.cache[key]<block_end><def_stmt>append self k v<block_start>self.cache[k]=v<line_sep>self.connect(self.before[self.tail] k)<line_sep>self.connect(k self.tail)<if_stmt>len(self.cache)<g>self.size<block_start>self.delete(self.next[self.head])<block_end><block_end><def_stmt>get self key<block_start><if_stmt>key<not><in>self.cache<block_start><return>-1<block_end>val=self.cache[key]<line_sep>self.delete(key)<line_sep>self.append(key val)<line_sep><return>val<block_end><def_stmt>put self key value<block_start><if_stmt>key<in>self.cache<block_start>self.delete(key)<block_end>self.append(key value)<block_end><block_end>#Push in tail, delete from head <class_stmt>ListNode<block_start><def_stmt>__init__ self key val<block_start>self.key=key<line_sep>self.val=val<line_sep>self.next=<none><line_sep>self.prev=<none><block_end><block_end><class_stmt>LinkedList<block_start><def_stmt>__init__ self <block_start>self.head=<none><line_sep>self.tail=<none><block_end><def_stmt>insert self node<block_start>node.next,node.prev=<none> <none><if_stmt>self.head<block_start>self.tail.next=node<line_sep>node.prev=self.tail<block_end><else_stmt><block_start>self.head=node<block_end>self.tail=node<block_end><def_stmt>delete self node<block_start><if_stmt>node.prev<block_start>node.prev.next=node.next<block_end><else_stmt><block_start>self.head=node.next<block_end><if_stmt>node.next<block_start>node.next.prev=node.prev<block_end><else_stmt><block_start>self.tail=node.prev<block_end>node.next,node.prev=<none> <none><block_end><block_end><class_stmt>LRUCache<block_start><def_stmt>__init__ self capacity:int<block_start>self.List=LinkedList()<line_sep>self.dic={}<line_sep>self.capacity=capacity<block_end><def_stmt>__insert self key val<block_start><if_stmt>key<in>self.dic<block_start>self.List.delete(self.dic[key])<block_end>node=ListNode(key val)<line_sep>self.List.insert(node)<line_sep>self.dic[key]=node<block_end><def_stmt>get self key:int<arrow>int<block_start><if_stmt>key<not><in>self.dic<block_start><return>-1<block_end>val=self.dic[key].val<line_sep>self.__insert(key val)<line_sep><return>val<block_end><def_stmt>put self key:int value:int<arrow><none><block_start><if_stmt>len(self.dic)<eq>self.capacity<and>key<not><in>self.dic#print("del ",self.List.head.key) <block_start><del_stmt>self.dic[self.List.head.key]<line_sep>self.List.delete(self.List.head)<block_end>self.__insert(key value)<block_end><block_end>#Push in head, delete 
from tail <class_stmt>ListNode<block_start><def_stmt>__init__ self key val<block_start>self.key=key<line_sep>self.val=val<line_sep>self.next=<none><line_sep>self.prev=<none><block_end><block_end><class_stmt>LinkedList<block_start><def_stmt>__init__ self <block_start>self.head=<none><line_sep>self.tail=<none><block_end><def_stmt>insert self node<block_start>node.next,node.prev=<none> <none><if_stmt><not>self.tail<block_start>self.tail=node<block_end><if_stmt>self.head<block_start>node.next=self.head<line_sep>self.head.prev=node<block_end>self.head=node<block_end><def_stmt>delete self node<block_start><if_stmt>node.prev<block_start>node.prev.next=node.next<block_end><else_stmt><block_start>self.head=node.next<block_end><if_stmt>node.next<block_start>node.next.prev=node.prev<block_end><else_stmt><block_start>self.tail=node.prev<block_end>node.next,node.prev=<none> <none><block_end><block_end><class_stmt>LRUCache<block_start><def_stmt>__init__ self capacity:int<block_start>self.List=LinkedList()<line_sep>self.dic={}<line_sep>self.capacity=capacity<block_end><def_stmt>__insert self key val<block_start><if_stmt>key<in>self.dic<block_start>self.List.delete(self.dic[key])<block_end>node=ListNode(key val)<line_sep>self.List.insert(node)<line_sep>self.dic[key]=node<block_end><def_stmt>get self key:int<arrow>int<block_start><if_stmt>key<not><in>self.dic<block_start><return>-1<block_end>val=self.dic[key].val<line_sep>self.__insert(key val)<line_sep><return>val<block_end><def_stmt>put self key:int value:int<arrow><none><block_start><if_stmt>len(self.dic)<eq>self.capacity<and>key<not><in>self.dic#print("del ",self.List.tail.key) <block_start><del_stmt>self.dic[self.List.tail.key]<line_sep>self.List.delete(self.List.tail)<block_end>self.__insert(key value)<block_end><block_end>
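# --- Illustrative behaviour check (editor addition). ---
# All of the LRUCache variants above are meant to satisfy the same contract;
# this tiny sketch spells it out with a capacity-2 cache (values are made up).
cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)
assert cache.get(1) == 1      # key 1 becomes most recent, key 2 least recent
cache.put(3, 3)               # capacity reached, so key 2 is evicted
assert cache.get(2) == -1
assert cache.get(3) == 3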
"""Test the Google Maps Travel Time config flow."""<import_stmt>pytest<import_from_stmt>homeassistant config_entries data_entry_flow<import_from_stmt>homeassistant.components.google_travel_time.const ARRIVAL_TIME CONF_ARRIVAL_TIME CONF_AVOID CONF_DEPARTURE_TIME CONF_DESTINATION CONF_LANGUAGE CONF_ORIGIN CONF_TIME CONF_TIME_TYPE CONF_TRAFFIC_MODEL CONF_TRANSIT_MODE CONF_TRANSIT_ROUTING_PREFERENCE CONF_UNITS DEFAULT_NAME DEPARTURE_TIME DOMAIN <import_from_stmt>homeassistant.const CONF_API_KEY CONF_MODE CONF_NAME CONF_UNIT_SYSTEM_IMPERIAL <import_from_stmt>tests.components.google_travel_time.const MOCK_CONFIG<line_sep>@pytest.mark.usefixtures("validate_config_entry" "bypass_setup")<async_keyword><def_stmt>test_minimum_fields hass<block_start>"""Test we get the form."""<line_sep>result=<await>hass.config_entries.flow.async_init(DOMAIN context={"source":config_entries.SOURCE_USER})<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result["errors"]<eq>{}<line_sep>result2=<await>hass.config_entries.flow.async_configure(result["flow_id"] MOCK_CONFIG )<assert_stmt>result2["type"]<eq>data_entry_flow.RESULT_TYPE_CREATE_ENTRY<assert_stmt>result2["title"]<eq>DEFAULT_NAME<assert_stmt>result2["data"]<eq>{CONF_NAME:DEFAULT_NAME CONF_API_KEY:"api_key" CONF_ORIGIN:"location1" CONF_DESTINATION:"location2" }<block_end>@pytest.mark.usefixtures("invalidate_config_entry")<async_keyword><def_stmt>test_invalid_config_entry hass<block_start>"""Test we get the form."""<line_sep>result=<await>hass.config_entries.flow.async_init(DOMAIN context={"source":config_entries.SOURCE_USER})<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result["errors"]<eq>{}<line_sep>result2=<await>hass.config_entries.flow.async_configure(result["flow_id"] MOCK_CONFIG )<assert_stmt>result2["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result2["errors"]<eq>{"base":"cannot_connect"}<block_end>@pytest.mark.parametrize("data,options" [(MOCK_CONFIG {CONF_MODE:"driving" CONF_ARRIVAL_TIME:"test" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL } )] )@pytest.mark.usefixtures("validate_config_entry")<async_keyword><def_stmt>test_options_flow hass mock_config<block_start>"""Test options flow."""<line_sep>result=<await>hass.config_entries.options.async_init(mock_config.entry_id data=<none>)<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result["step_id"]<eq>"init"<line_sep>result=<await>hass.config_entries.options.async_configure(result["flow_id"] user_input={CONF_MODE:"driving" CONF_LANGUAGE:"en" CONF_AVOID:"tolls" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL CONF_TIME_TYPE:ARRIVAL_TIME CONF_TIME:"test" CONF_TRAFFIC_MODEL:"best_guess" CONF_TRANSIT_MODE:"train" CONF_TRANSIT_ROUTING_PREFERENCE:"less_walking" } )<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_CREATE_ENTRY<assert_stmt>result["title"]<eq>""<assert_stmt>result["data"]<eq>{CONF_MODE:"driving" CONF_LANGUAGE:"en" CONF_AVOID:"tolls" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL CONF_ARRIVAL_TIME:"test" CONF_TRAFFIC_MODEL:"best_guess" CONF_TRANSIT_MODE:"train" CONF_TRANSIT_ROUTING_PREFERENCE:"less_walking" }<assert_stmt>mock_config.options<eq>{CONF_MODE:"driving" CONF_LANGUAGE:"en" CONF_AVOID:"tolls" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL CONF_ARRIVAL_TIME:"test" CONF_TRAFFIC_MODEL:"best_guess" CONF_TRANSIT_MODE:"train" CONF_TRANSIT_ROUTING_PREFERENCE:"less_walking" }<block_end>@pytest.mark.parametrize("data,options" [(MOCK_CONFIG {})] 
)@pytest.mark.usefixtures("validate_config_entry")<async_keyword><def_stmt>test_options_flow_departure_time hass mock_config<block_start>"""Test options flow with departure time."""<line_sep>result=<await>hass.config_entries.options.async_init(mock_config.entry_id data=<none>)<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result["step_id"]<eq>"init"<line_sep>result=<await>hass.config_entries.options.async_configure(result["flow_id"] user_input={CONF_MODE:"driving" CONF_LANGUAGE:"en" CONF_AVOID:"tolls" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL CONF_TIME_TYPE:DEPARTURE_TIME CONF_TIME:"test" CONF_TRAFFIC_MODEL:"best_guess" CONF_TRANSIT_MODE:"train" CONF_TRANSIT_ROUTING_PREFERENCE:"less_walking" } )<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_CREATE_ENTRY<assert_stmt>result["title"]<eq>""<assert_stmt>result["data"]<eq>{CONF_MODE:"driving" CONF_LANGUAGE:"en" CONF_AVOID:"tolls" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL CONF_DEPARTURE_TIME:"test" CONF_TRAFFIC_MODEL:"best_guess" CONF_TRANSIT_MODE:"train" CONF_TRANSIT_ROUTING_PREFERENCE:"less_walking" }<assert_stmt>mock_config.options<eq>{CONF_MODE:"driving" CONF_LANGUAGE:"en" CONF_AVOID:"tolls" CONF_UNITS:CONF_UNIT_SYSTEM_IMPERIAL CONF_DEPARTURE_TIME:"test" CONF_TRAFFIC_MODEL:"best_guess" CONF_TRANSIT_MODE:"train" CONF_TRANSIT_ROUTING_PREFERENCE:"less_walking" }<block_end>@pytest.mark.usefixtures("validate_config_entry" "bypass_setup")<async_keyword><def_stmt>test_dupe hass<block_start>"""Test setting up the same entry data twice is OK."""<line_sep>result=<await>hass.config_entries.flow.async_init(DOMAIN context={"source":config_entries.SOURCE_USER})<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result["errors"]<eq>{}<line_sep>result2=<await>hass.config_entries.flow.async_configure(result["flow_id"] {CONF_API_KEY:"test" CONF_ORIGIN:"location1" CONF_DESTINATION:"location2" } )<assert_stmt>result2["type"]<eq>data_entry_flow.RESULT_TYPE_CREATE_ENTRY<line_sep>result=<await>hass.config_entries.flow.async_init(DOMAIN context={"source":config_entries.SOURCE_USER})<assert_stmt>result["type"]<eq>data_entry_flow.RESULT_TYPE_FORM<assert_stmt>result["errors"]<eq>{}<line_sep>result2=<await>hass.config_entries.flow.async_configure(result["flow_id"] {CONF_API_KEY:"test" CONF_ORIGIN:"location1" CONF_DESTINATION:"location2" } )<line_sep><await>hass.async_block_till_done()<assert_stmt>result2["type"]<eq>data_entry_flow.RESULT_TYPE_CREATE_ENTRY<block_end>
<import_stmt>pytest<import_from_stmt>pytest approx<import_from_stmt>datetime datetime<import_from_stmt>pymap3d.timeconv str2dt<import_stmt>pymap3d.sidereal<as>pms<line_sep>t0=datetime(2014 4 6 8)<def_stmt>test_juliantime <block_start><assert_stmt>pms.juliandate(t0)<eq>approx(2.456753833333e6)<block_end><def_stmt>test_types <block_start>np=pytest.importorskip("numpy")<assert_stmt>str2dt(t0)<eq>t0# passthrough <assert_stmt>str2dt("2014-04-06T08:00:00")<eq>t0<line_sep>ti=[str2dt("2014-04-06T08:00:00") str2dt("2014-04-06T08:01:02")]<line_sep>to=[t0 datetime(2014 4 6 8 1 2)]<assert_stmt>ti<eq>to# even though ti is numpy array of datetime and to is list of datetime t1=[t0 t0]<assert_stmt>(np.asarray(str2dt(t1))<eq>t0).all()<block_end><def_stmt>test_datetime64 <block_start>np=pytest.importorskip("numpy")<line_sep>t1=np.datetime64(t0)<assert_stmt>str2dt(t1)<eq>t0<line_sep>t1=np.array([np.datetime64(t0) np.datetime64(t0)])<assert_stmt>(str2dt(t1)<eq>t0).all()<block_end><def_stmt>test_xarray_time <block_start>xarray=pytest.importorskip("xarray")<line_sep>t={"time":t0}<line_sep>ds=xarray.Dataset(t)<assert_stmt>str2dt(ds["time"])<eq>t0<line_sep>t2={"time":[t0 t0]}<line_sep>ds=xarray.Dataset(t2)<assert_stmt>(str2dt(ds["time"])<eq>t0).all()<block_end><def_stmt>test_pandas_time <block_start>pandas=pytest.importorskip("pandas")<line_sep>t=pandas.Series(t0)<assert_stmt>(str2dt(t)<eq>t0).all()<line_sep>t=pandas.Series([t0 t0])<assert_stmt>(str2dt(t)<eq>t0).all()<block_end>
<import_stmt>gc<def_stmt>start <block_start>gc.collect(0)<line_sep>gc.collect(1)<line_sep>gc.collect(2)<line_sep>l=[]<line_sep>l.append(l)<del_stmt>l<line_sep>gc.collect(2)<block_end>gc.collect()<line_sep>start()<line_sep>
<import_from_stmt>unittest TestCase<import_stmt>numpy<as>np<import_from_stmt>copulas.bivariate.independence Independence<class_stmt>TestIndependence(TestCase)<block_start><def_stmt>test___init__ self<block_start>"""Independence copula can be instantiated directly."""<line_sep># Setup / Run instance=Independence()<line_sep># Check <assert_stmt>isinstance(instance Independence)<assert_stmt>instance.theta<is><none><assert_stmt>instance.tau<is><none><block_end><def_stmt>test_fit self<block_start>"""Fit checks that the given values are independent."""<line_sep># Setup instance=Independence()<line_sep>data=np.array([[1 2] [4 3]])<line_sep># Run instance.fit(data)<line_sep># Check <assert_stmt>instance.tau<is><none><assert_stmt>instance.theta<is><none><block_end><def_stmt>test_cumulative_distribution self<block_start>"""cumulative_distribution is the product of both probabilities."""<line_sep># Setup instance=Independence()<line_sep>data=np.array([[0.0 0.0] [0.1 0.1] [0.2 0.2] [0.5 0.5] [0.9 0.9] [1.0 1.0]])<line_sep>expected_result=np.array([0.00 0.01 0.04 0.25 0.81 1.00 ])<line_sep># Run result=instance.cumulative_distribution(data)<line_sep># Check <assert_stmt>(result<eq>expected_result).all().all()<block_end><block_end>
""" =================== Save a set of views =================== Save some views in png files. """<import_from_stmt>surfer Brain<line_sep>print(__doc__)<line_sep>sub='fsaverage'<line_sep>hemi='lh'<line_sep>surf='inflated'<line_sep>brain=Brain(sub hemi surf)<line_sep>############################################################################### # save 1 image brain.show_view('lat')<line_sep>brain.save_image("%s_lat.png"%sub)<line_sep>############################################################################### # save some images brain.save_imageset(sub ['med' 'lat' 'ros' 'caud'] 'jpg')<line_sep>
<import_from_stmt>.utils utils<import_from_stmt>discord.ext commands<import_stmt>traceback<import_stmt>datetime<import_stmt>discord<class_stmt>Welcome(commands.Cog)#Allow to welcome new members who join guild. If it enable, will send them a message. <block_start><def_stmt>__init__ self bot<block_start>self.bot=bot<line_sep>self.redis=bot.db.redis<block_end><async_keyword><def_stmt>error self owner e# await owner.send("There is an error with a newcomer, please report this to the creator.\n {}".format(e)) <block_start>Current_Time=datetime.datetime.utcnow().strftime("%b/%d/%Y %H:%M:%S UTC")<line_sep>utils.prRed(Current_Time)<line_sep>utils.prRed("Error!")<line_sep>utils.prRed(traceback.format_exc())<line_sep>error='```py\n{}\n```'.format(traceback.format_exc())<line_sep><await>self.bot.owner.send("```py\n{}```".format(Current_Time+"\n"+"ERROR!")+"\n"+error)<block_end>@commands.Cog.listener()<async_keyword><def_stmt>on_member_join self member<block_start><if_stmt><await>self.redis.hget("{}:Config:Cogs".format(member.guild.id) "welcome")<eq>"on"<block_start>config=<await>self.redis.hgetall("{}:Welcome:Message".format(member.guild.id))<try_stmt><block_start><if_stmt>config.get("enable_message")<eq>"on"<block_start>msg=config["message"].format(user=member.name server=member.guild user_mention=member.mention)<if_stmt>config.get("enable_delete")<eq>"on"<block_start>time=int(config["delete_msg"])<block_end><else_stmt><block_start>time=<none><block_end><if_stmt>config.get("whisper")<eq>"on"<block_start><await>member.send(msg delete_after=time)<block_end><else_stmt><block_start><await>self.bot.get_channel(int(config["channel"])).send(msg delete_after=time)<block_end><block_end>#Now assign a roles. <if_stmt>config.get("role")<eq>"on"<block_start>role_list=<await>self.redis.smembers('{}:Welcome:Assign_Roles'.format(member.guild.id))<line_sep>role_obj=[]<for_stmt>x role_list<block_start><if_stmt>x<eq>''#if it return empty string <block_start><continue><block_end># role_obj.append(discord.utils.get(member.guild.roles,id=int(x))) role_obj.append(member.guild.get_role(int(x)))<block_end><try_stmt><block_start><await>member.add_roles(*role_obj reason="User has join the server,and an admin request to add role(s) to new person")<block_end><except_stmt>discord.Forbidden<block_start><pass>#if unable to add user <block_end><except_stmt>discord.NotFound<block_start><pass><block_end><block_end><block_end>#if it cant find that user. Assume it left server. <except_stmt>Exception<as>e<block_start><await>self.error(member.guild.owner e)<block_end><block_end><block_end><block_end><def_stmt>setup bot<block_start>bot.add_cog(Welcome(bot))<block_end>
<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<import_from_stmt>melusine.prepare_email.body_header_extraction extract_last_body<import_from_stmt>melusine.prepare_email.body_header_extraction extract_body<import_from_stmt>melusine.prepare_email.body_header_extraction extract_header<line_sep>structured_body=[{"meta":{"date":<none> "from":<none> "to":<none>} "structured_text":{"header":"demande document" "text":[{"part":"Bonjour. " "tags":"HELLO"} {"part":"Je vous remercie pour le document" "tags":"BODY"} {"part":"Cordialement," "tags":"GREETINGS"} {"part":"Mr Unknown" "tags":"BODY"} ] } } {"meta":{"date":" mar. 22 mai 2018 à 10:20" "from":" <<EMAIL>> " "to":<none> } "structured_text":{"header":"demande document" "text":[{"part":"Bonjour. " "tags":"HELLO"} {"part":"Merci de bien vouloir prendre connaissance du document ci-joint" "tags":"BODY" } {"part":"Cordialement," "tags":"GREETINGS"} {"part":"Votre mutuelle" "tags":"BODY"} {"part":"La visualisation des fichiers PDF nécessite Adobe Reader." "tags":"FOOTER" } ] } } ]<def_stmt>test_extract_last_body <block_start>input_df=pd.DataFrame({"structured_body":[structured_body]})<line_sep>output_df=pd.Series(["Je vous remercie pour le document "])<line_sep>result=input_df.apply(extract_last_body axis=1)<line_sep>pd.testing.assert_series_equal(result output_df)<block_end>message_dict={"meta":{"date":" mar. 22 mai 2018 à 10:20" "from":" <<EMAIL>> " "to":<none> } "structured_text":{"header":"demande document" "text":[{"part":"Bonjour. " "tags":"HELLO"} {"part":"Merci de bien vouloir prendre connaissance du document ci-joint" "tags":"BODY" } {"part":"Cordialement," "tags":"GREETINGS"} {"part":"Votre mutuelle" "tags":"BODY"} {"part":"La visualisation des fichiers PDF nécessite Adobe Reader." "tags":"FOOTER" } ] } }<def_stmt>test_extract_body <block_start>input_dict=message_dict<line_sep>output="Merci de bien vouloir prendre connaissance du document ci-joint "<line_sep>result=extract_body(input_dict)<line_sep>np.testing.assert_string_equal(result output)<block_end><def_stmt>test_extract_header <block_start>input_dict=message_dict<line_sep>output="demande document"<line_sep>result=extract_header(input_dict)<line_sep>np.testing.assert_string_equal(result output)<block_end>
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>particleFlowDisplacedVertexCandidate=cms.EDProducer("PFDisplacedVertexCandidateProducer" # The track collection use for the fitting. May be any collection. # The only condition is that it shall contain the hit pattern information trackCollection=cms.InputTag("generalTracks") # verbosity verbose=cms.untracked.bool(<false>) # Debug flag debug=cms.untracked.bool(<false>) # maximum dca distance for two tracks to be linked dcaCut=cms.double(0.5) # minimum distance of secondary vertex with respect to the primary primaryVertexCut=cms.double(1.8) # maximum distance between the DCA Point and the inner hit of the track # not used for the moment dcaPInnerHitCut=cms.double(1000.0) # Primary vertex information used for dxy calculation mainVertexLabel=cms.InputTag("offlinePrimaryVertices" "") offlineBeamSpotLabel=cms.InputTag("offlineBeamSpot" "") # Tracks preselection to reduce the combinatorics in PFDisplacedVertexCandidates # this cuts are repeated then in a smarter way in the PFDisplacedVertexFinder # be sure you are consistent between them. tracksSelectorParameters=cms.PSet(# selection parameters for secondary tracks nChi2_max=cms.double(5.) pt_min=cms.double(.2) # if the tracks is not a good candidate to be a secondary (dxy cut) restrict in minimal pt # this cut reduce drastically the combinatorics. It is very useful to reduce the # PFDisplacedVertex timing pt_min_prim=cms.double(.8) dxy=cms.double(.2) ))<line_sep>
# -*- coding: utf-8 -*- ### # (C) Copyright (2012-2017) Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. ### <import_from_future_stmt> print_function<import_from_future_stmt> unicode_literals<import_from_future_stmt> division<import_from_future_stmt> absolute_import<import_from_stmt>future standard_library<line_sep>standard_library.install_aliases()<import_from_stmt>hpOneView.resources.resource ResourceClient<class_stmt>IdPools(object)<block_start>""" Class for Id Pools API client. """<line_sep>URI='/rest/id-pools'<def_stmt>__init__ self con<block_start>self._client=ResourceClient(con self.URI)<block_end><def_stmt>get self id_or_uri<block_start>""" Gets a pool. Args: id_or_uri: Can be either the range ID or URI. Returns: dict: Pool resource. """<line_sep><return>self._client.get(id_or_uri)<block_end><def_stmt>enable self information id_or_uri timeout=-1<block_start>""" Enables or disables a pool. Args: information (dict): Information to update. id_or_uri: ID or URI of range. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: dict: Updated resource. """<line_sep>uri=self._client.build_uri(id_or_uri)<line_sep><return>self._client.update(information uri timeout=timeout)<block_end><def_stmt>validate_id_pool self id_or_uri ids_pools<block_start>""" Validates an ID pool. Args: id_or_uri: ID or URI of range. ids_pools (list): List of Id Pools. Returns: dict: A dict containing a list with IDs. """<line_sep>uri=self._client.build_uri(id_or_uri)+"/validate?idList="+"&idList=".join(ids_pools)<line_sep><return>self._client.get(uri)<block_end><def_stmt>validate self information id_or_uri timeout=-1<block_start>""" Validates a set of user specified IDs to reserve in the pool. This API can be used to check if the specified IDs can be allocated. Args: information (dict): Information to update. Can result in system specified IDs or the system reserving user-specified IDs. id_or_uri: ID or URI of vSN range. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: dict: A dict containing a list with IDs. """<line_sep>uri=self._client.build_uri(id_or_uri)+"/validate"<line_sep><return>self._client.update(information uri timeout=timeout)<block_end><def_stmt>allocate self information id_or_uri timeout=-1<block_start>""" Allocates a set of IDs from range. 
The allocator returned contains the list of IDs successfully allocated. Args: information (dict): Information to update. Can result in system specified IDs or the system reserving user-specified IDs. id_or_uri: ID or URI of vSN range. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: dict: A dict containing a list with IDs. """<line_sep>uri=self._client.build_uri(id_or_uri)+"/allocator"<line_sep><return>self._client.update(information uri timeout=timeout)<block_end><def_stmt>collect self information id_or_uri timeout=-1<block_start>""" Collects one or more IDs to be returned to a pool. Args: information (dict): The list of IDs to be collected id_or_uri: ID or URI of range timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: dict: Collector containing list of collected IDs successfully collected. """<line_sep>uri=self._client.build_uri(id_or_uri)+"/collector"<line_sep><return>self._client.update(information uri timeout=timeout)<block_end><def_stmt>get_check_range_availability self id_or_uri ids_pools<block_start>""" Checks the range availability in the ID pool. Args: id_or_uri: ID or URI of range. ids_pools (list): List of Id Pools. Returns: dict: A dict containing a list with IDs. """<line_sep>uri=self._client.build_uri(id_or_uri)+"/checkrangeavailability?idList="+"&idList=".join(ids_pools)<line_sep><return>self._client.get(uri)<block_end><def_stmt>generate self id_or_uri<block_start>""" Generates and returns a random range. Args: id_or_uri: ID or URI of range. Returns: dict: A dict containing a list with IDs. """<line_sep>uri=self._client.build_uri(id_or_uri)+"/generate"<line_sep><return>self._client.get(uri)<block_end><block_end>
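# --- Hedged usage sketch (editor addition, not part of the original client). ---
# Assumes an already-authenticated OneView `connection` object and that the
# appliance exposes a virtual MAC pool at /rest/id-pools/vmac; the MAC address
# below is made up purely for illustration.
id_pools = IdPools(connection)
vmac_pool = id_pools.get('/rest/id-pools/vmac')
generated_range = id_pools.generate('/rest/id-pools/vmac')
availability = id_pools.get_check_range_availability('/rest/id-pools/vmac',
                                                     ['42:E0:3D:10:00:00'])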
<import_from_stmt>hearthstone cardxml<def_stmt>test_cardxml_load <block_start>cardid_db,_=cardxml.load()<line_sep>dbf_db,_=cardxml.load_dbf()<assert_stmt>cardid_db<assert_stmt>dbf_db<for_stmt>card_id,card cardid_db.items()<block_start><assert_stmt>dbf_db[card.dbf_id].id<eq>card_id<block_end><for_stmt>dbf_id,card dbf_db.items()<block_start><assert_stmt>cardid_db[card.id].dbf_id<eq>dbf_id<block_end><assert_stmt>cardid_db["EX1_001"].quest_reward<eq>""<assert_stmt>cardid_db["UNG_940"].quest_reward<eq>"UNG_940t8"<block_end>
<import_from_stmt>.models kinesisvideoarchivedmedia_backends<import_from_stmt>..core.models base_decorator<line_sep>kinesisvideoarchivedmedia_backend=kinesisvideoarchivedmedia_backends["us-east-1"]<line_sep>mock_kinesisvideoarchivedmedia=base_decorator(kinesisvideoarchivedmedia_backends)<line_sep>
# -*- coding: future_fstrings -*- # # Copyright 2019 <NAME> <<EMAIL>> # # This file is part of Salus # (see https://github.com/SymbioticLab/Salus). # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Get mem allocation for one iteration, to plot CDF. See card#275 LaneMgr: enabled InLane Scheduler: pack Collected data: allocation """<import_from_future_stmt> absolute_import print_function division unicode_literals<import_stmt>inspect<import_from_stmt>absl flags<import_from_stmt>benchmarks.driver.runner Executor<import_from_stmt>benchmarks.driver.server.config presets<import_from_stmt>benchmarks.driver.workload WTL<import_from_stmt>benchmarks.exps run_seq maybe_forced_preset run_tfdist case_switch_main<line_sep>FLAGS=flags.FLAGS<def_stmt>case1 argv<block_start>model,bs,bn='inception3' 50 10<line_sep>name=inspect.currentframe().f_code.co_name<line_sep>scfg=maybe_forced_preset(presets.AllocProf)<line_sep>scfg.scheduler='pack'<line_sep>wl=WTL.create(model bs bn)<line_sep>run_seq(scfg.copy(output_dir=FLAGS.save_dir/name) wl)<block_end><def_stmt>case2 argv<block_start>model,bs,bn='inception3' 50 10<line_sep>name=inspect.currentframe().f_code.co_name<line_sep>scfg=maybe_forced_preset(presets.OpTracing)<line_sep>scfg.logconf='memop'<line_sep>scfg.scheduler='pack'<line_sep>wl=WTL.create(model bs bn)<line_sep>run_seq(scfg.copy(output_dir=FLAGS.save_dir/name) wl)<block_end>@case_switch_main<def_stmt>main <block_start><return>case1 case2<block_end>
<import_stmt>logging<import_from_stmt>.abstractprotocol AbstractProtocol<import_from_stmt>.protocol_helpers crcJK232<as>crc<line_sep>log=logging.getLogger("jk232")<line_sep># Read basic information and status # DD A5 03 00 FF FD 77 # start bit 0xDD # status 0xA5 means read, status 0x5A means write. # command code 0x03 # Data length: 1 byte, indicating the effective length of the data carried in the frame. # Data content: N bytes, the content carried by the frame data, when the data length is 0, there is no such part. # Verification: 2 bytes, # the verification field is "command code + length byte + data segment content", # the verification method is thesum of the above fields and then the inverse plus 1, the high bit is in the front and the low bit is in the back. # Stop bit: 1 byte, indicating the end of a frame of data, fixed as 0x77; COMMANDS={"getBalancerData":{"name":"getBalancerData" "command_code":"03" "description":"Get Balancer Data" "help":" -- Get Balancer Data" "type":"QUERY" "checksum_required":"True" "response_type":"POSITIONAL" "response":[["Hex2Str" 1 "Start Byte" ""] ["Hex2Str" 1 "Command Code" ""] ["Hex2Str" 1 "Status" ""] ["Hex2Int" 1 "Data Length" ""] ["BigHex2Short:r/100" 2 "Total Battery Voltage" "V"] ["BigHex2Short:r/100" 2 "Total Current" "A"] ["BigHex2Short:r/100" 2 "Remaining Capacity" "Ah"] ["BigHex2Short:r/100" 2 "Nominal Capacity" "Ah"] ["BigHex2Short" 2 "Cycles" "cycles"] ["Hex2Str" 2 "Production Date" ""] ["Hex2Str" 2 "Equilibrium State (TODO)" ""] ["Hex2Str" 2 "Equilibrium State 2 (TODO)" ""] ["Hex2Str" 2 "Protection State (TODO)" ""] ["Hex2Str" 1 "Keep" ""] ["Hex2Int" 1 "Remaining Battery" "%"] ["Hex2Str" 1 "FET Control Status" ""] ["Hex2Int" 1 "Number of Battery Strings" ""] ["Hex2Int" 1 "Number of NTC" ""] ["BigHex2Short:(r-2731)/10" 2 "NTC 1" "°C"] ["BigHex2Short:(r-2731)/10" 2 "NTC 2" "°C"] ["Hex2Str" 2 "Checksum" ""] ["Hex2Str" 1 "End Byte" ""] ] "test_responses":[bytes.fromhex("DD 03 00 1B 17 00 00 00 02 D0 03 E8 00 00 20 78 00 00 00 00 00 00 10 48 03 0F 02 0B 76 0B 82 FB FF 77") ] } }<class_stmt>jk232(AbstractProtocol)<block_start><def_stmt>__init__ self *args **kwargs<arrow><none><block_start>super().__init__()<line_sep>self._protocol_id=b"JK232"<line_sep>self.COMMANDS=COMMANDS<line_sep>self.STATUS_COMMANDS=["getBalancerData" ]<line_sep>self.SETTINGS_COMMANDS=["" ]<line_sep>self.DEFAULT_COMMAND="getBalancerData"<block_end><def_stmt>get_full_command self command<arrow>bytes<block_start>""" Override the default get_full_command as its different """<line_sep>log.info(f"Using protocol {self._protocol_id} with {len(self.COMMANDS)} commands")<line_sep># These need to be set to allow other functions to work` self._command=command<line_sep>self._command_defn=self.get_command_defn(command)<line_sep># End of required variables setting <if_stmt>self._command_defn<is><none># Maybe return a default here? <block_start><return><none><block_end><if_stmt>"command_code"<in>self._command_defn# Read basic information and status # DD A5 03 00 FF FD 77 # full command is 7 bytes long <block_start>cmd=bytearray(7)<line_sep># start bit 0xDD cmd[0]=0xDD<line_sep>log.debug(f"cmd with start bit: {cmd}")<line_sep># status 0xA5 means read, status 0x5A means write. <if_stmt>self._command_defn["type"]<eq>"SETTER"<block_start>cmd[1]=0x5A<block_end><else_stmt><block_start>cmd[1]=0xA5<block_end># command code 0x03 command_code=int(self._command_defn["command_code"] 16)<line_sep># Data length: 1 byte, indicating the effective length of the data carried in the frame. 
# Data content: N bytes, the content carried by the frame data, when the data length is 0, there is no such part. data=""<line_sep># TODO: data stuff here data_len=len(data)<if_stmt>data_len<eq>0<block_start>crc_high,crc_low=crc([command_code data_len])<line_sep>cmd[2]=command_code<line_sep>cmd[3]=data_len<line_sep>cmd[4]=crc_high<line_sep>cmd[5]=crc_low<line_sep>cmd[6]=0x77<block_end>log.debug(f"cmd with crc: {cmd}")<line_sep><return>cmd<block_end><block_end><def_stmt>get_responses self response<block_start>""" Override the default get_responses as its different """<line_sep>responses=[]<line_sep># remove \n # response = response.replace(b"\n", b"") <if_stmt>self._command_defn<is><not><none><and>self._command_defn["response_type"]<eq>"POSITIONAL"# Have a POSITIONAL type response, so need to break it up... # example defn : # "response": [ # ["discard", 1, "start flag", ""], # ["discard", 1, "module address", ""], # ["discard", 1, "command id", ""], # ["discard", 1, "data length", ""], # ] # example response data b"\xa5\x01\x90\x08\x02\x10\x00\x00uo\x03\xbc\xf3", <block_start><for_stmt>defn self._command_defn["response"]<block_start>size=defn[1]<line_sep>item=response[:size]<line_sep>responses.append(item)<line_sep>response=response[size:]<block_end><if_stmt>response<block_start>responses.append(response)<block_end>log.debug(f"get_responses: responses {responses}")<line_sep><return>responses<block_end><else_stmt><block_start><return>bytearray(response)<block_end><block_end><block_end>
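# --- Worked checksum example (editor addition). ---
# The frame layout comments above say the check bytes cover "command code +
# length byte + data content" and are computed as the sum of those bytes,
# inverted, plus one (i.e. a 16-bit two's complement). For the basic read
# frame DD A5 03 00 FF FD 77 the checked bytes are 0x03 and 0x00:
payload = [0x03, 0x00]
checksum = (0x10000 - sum(payload)) & 0xFFFF   # invert-and-add-one over 16 bits
crc_high, crc_low = checksum >> 8, checksum & 0xFF
assert (crc_high, crc_low) == (0xFF, 0xFD)     # matches the FF FD in the frame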
""" Automatically update the baseline of vcpkg-configuration.json """<import_stmt>json<import_stmt>os<import_stmt>pybamm<def_stmt>update_baseline <block_start>""" Opens vcpkg-configuration.json and updates the baseline with the latest commit id """<line_sep># Get latest commit id from pybamm-team/sundials-vcpkg-registry cmd="git ls-remote https://github.com/pybamm-team/sundials-vcpkg-registry | grep refs/heads/main | cut -f 1 | tr -d '\n'"# noqa: E501 commit_id=os.popen(cmd).read()<line_sep># Open file and write it <with_stmt>open(os.path.join(pybamm.root_dir() "vcpkg-configuration.json") "r+")<as>file<block_start>output=file.read()<line_sep>json_commit_id=json.loads(output)["registries"][0]["baseline"]<line_sep>output=output.replace(json_commit_id commit_id)<line_sep>file.truncate(0)<line_sep>file.seek(0)<line_sep>file.write(output)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>update_baseline()<block_end>
"""Verdict: light-time delay from the Sun can be neglected in is_sunlit()! For the Sun's position from Earth, does calling observe() really make enough difference to justify the expense? Here we take two approaches to answering the question: we compare the difference every day over 40 years, and then we do a back-of-the-envelope estimate of how big we might have expected the effect to be. The two approaches agree! The maximum difference is around 10 mas. What difference does that make for a satellite? Let's take the ISS. With its orbital period of 92 minutes, it sees the Earth swing in a full circle around the sky in that amount of time. That's 360/90 = 4 degrees per minute (!) = 240 arcseconds per second. At that speed, a difference of 10 mas in the Sun's position would at most hasten or delay the moment of sunrise for the ISS by 40 microseconds, which is far below the accuracy of TLE position predictions and can thus be safely incurred. """<import_from_stmt>skyfield api<import_from_stmt>skyfield.api load<line_sep>ts=load.timescale()<line_sep>eph=load('de421.bsp')<line_sep>sun=eph['sun']<line_sep>earth=eph['earth']<line_sep>t=ts.utc(2000 1 range(40<times>365))<line_sep>s1=earth.at(t).observe(sun)<line_sep>s2=(sun-earth).at(t)<line_sep>print('Milliarcseconds (mas) difference:' s1.separation_from(s2).mas().max())<line_sep>print()<line_sep>print('Does that make physical sense?')<line_sep># The Sun orbits around the Solar System barycenter which is usually # inside the Sun's radius but occasionally a bit outside of it. So we # can very roughly imagine the Sun's orbit as its own circumference, # give or take. solar_radius_km=696340<line_sep>distance_sun_travels_in_one_orbit=solar_radius_km<times>api.tau<line_sep># It takes the Sun more than a decade to travel that path, as its orbit # is roughly the opposite of Jupiter's (which takes 12 years to circle # the Sun). So it turns out that it travels a bit slowly. sun_km_per_s=distance_sun_travels_in_one_orbit/10/365.25/24/60/60<line_sep>print('Sun km/s:' sun_km_per_s)<line_sep>light_delay_seconds=s2[0].position.length().light_seconds()<line_sep>print('Sample light delay from Sun to Earth (seconds):' light_delay_seconds)<line_sep>print('How far does the Sun move in that time?')<line_sep>travel_km=light_delay_seconds<times>sun_km_per_s<line_sep>print('Sun km moved during that light travel time:' travel_km)<line_sep>print('What angle does that many kilometers subtend from Earth?')<line_sep>earth_sun_distance_km=150e6<line_sep>travel_angle=api.Angle(radians=travel_km/earth_sun_distance_km)<line_sep>print('Angle traveled by sun in arcseconds:' travel_angle.arcseconds())<line_sep>print('Angle traveled by sun in mas:' travel_angle.mas())<line_sep>print()<line_sep>print(__doc__.rstrip())<line_sep>
<import_stmt>esphome.codegen<as>cg<import_stmt>esphome.config_validation<as>cv<import_from_stmt>esphome core automation<import_from_stmt>esphome.automation maybe_simple_id<import_from_stmt>esphome.const CONF_AUTO_CLEAR_ENABLED CONF_ID CONF_LAMBDA CONF_PAGES CONF_PAGE_ID CONF_ROTATION CONF_FROM CONF_TO CONF_TRIGGER_ID <import_from_stmt>esphome.core coroutine_with_priority<line_sep>IS_PLATFORM_COMPONENT=<true><line_sep>display_ns=cg.esphome_ns.namespace("display")<line_sep>DisplayBuffer=display_ns.class_("DisplayBuffer")<line_sep>DisplayPage=display_ns.class_("DisplayPage")<line_sep>DisplayPagePtr=DisplayPage.operator("ptr")<line_sep>DisplayBufferRef=DisplayBuffer.operator("ref")<line_sep>DisplayPageShowAction=display_ns.class_("DisplayPageShowAction" automation.Action)<line_sep>DisplayPageShowNextAction=display_ns.class_("DisplayPageShowNextAction" automation.Action)<line_sep>DisplayPageShowPrevAction=display_ns.class_("DisplayPageShowPrevAction" automation.Action)<line_sep>DisplayIsDisplayingPageCondition=display_ns.class_("DisplayIsDisplayingPageCondition" automation.Condition)<line_sep>DisplayOnPageChangeTrigger=display_ns.class_("DisplayOnPageChangeTrigger" automation.Trigger)<line_sep>CONF_ON_PAGE_CHANGE="on_page_change"<line_sep>DISPLAY_ROTATIONS={0:display_ns.DISPLAY_ROTATION_0_DEGREES 90:display_ns.DISPLAY_ROTATION_90_DEGREES 180:display_ns.DISPLAY_ROTATION_180_DEGREES 270:display_ns.DISPLAY_ROTATION_270_DEGREES }<def_stmt>validate_rotation value<block_start>value=cv.string(value)<if_stmt>value.endswith("°")<block_start>value=value[:-1]<block_end><return>cv.enum(DISPLAY_ROTATIONS int=<true>)(value)<block_end>BASIC_DISPLAY_SCHEMA=cv.Schema({cv.Optional(CONF_LAMBDA):cv.lambda_ })<line_sep>FULL_DISPLAY_SCHEMA=BASIC_DISPLAY_SCHEMA.extend({cv.Optional(CONF_ROTATION):validate_rotation cv.Optional(CONF_PAGES):cv.All(cv.ensure_list({cv.GenerateID():cv.declare_id(DisplayPage) cv.Required(CONF_LAMBDA):cv.lambda_ }) cv.Length(min=1) ) cv.Optional(CONF_ON_PAGE_CHANGE):automation.validate_automation({cv.GenerateID(CONF_TRIGGER_ID):cv.declare_id(DisplayOnPageChangeTrigger) cv.Optional(CONF_FROM):cv.use_id(DisplayPage) cv.Optional(CONF_TO):cv.use_id(DisplayPage) }) cv.Optional(CONF_AUTO_CLEAR_ENABLED default=<true>):cv.boolean })<async_keyword><def_stmt>setup_display_core_ var config<block_start><if_stmt>CONF_ROTATION<in>config<block_start>cg.add(var.set_rotation(DISPLAY_ROTATIONS[config[CONF_ROTATION]]))<block_end><if_stmt>CONF_AUTO_CLEAR_ENABLED<in>config<block_start>cg.add(var.set_auto_clear(config[CONF_AUTO_CLEAR_ENABLED]))<block_end><if_stmt>CONF_PAGES<in>config<block_start>pages=[]<for_stmt>conf config[CONF_PAGES]<block_start>lambda_=<await>cg.process_lambda(conf[CONF_LAMBDA] [(DisplayBufferRef "it")] return_type=cg.void)<line_sep>page=cg.new_Pvariable(conf[CONF_ID] lambda_)<line_sep>pages.append(page)<block_end>cg.add(var.set_pages(pages))<block_end><for_stmt>conf config.get(CONF_ON_PAGE_CHANGE [])<block_start>trigger=cg.new_Pvariable(conf[CONF_TRIGGER_ID] var)<if_stmt>CONF_FROM<in>conf<block_start>page=<await>cg.get_variable(conf[CONF_FROM])<line_sep>cg.add(trigger.set_from(page))<block_end><if_stmt>CONF_TO<in>conf<block_start>page=<await>cg.get_variable(conf[CONF_TO])<line_sep>cg.add(trigger.set_to(page))<block_end><await>automation.build_automation(trigger [(DisplayPagePtr "from") (DisplayPagePtr "to")] conf)<block_end><block_end><async_keyword><def_stmt>register_display var config<block_start><await>setup_display_core_(var 
config)<block_end>@automation.register_action("display.page.show" DisplayPageShowAction maybe_simple_id({cv.Required(CONF_ID):cv.templatable(cv.use_id(DisplayPage)) }) )<async_keyword><def_stmt>display_page_show_to_code config action_id template_arg args<block_start>var=cg.new_Pvariable(action_id template_arg)<if_stmt>isinstance(config[CONF_ID] core.Lambda)<block_start>template_=<await>cg.templatable(config[CONF_ID] args DisplayPagePtr)<line_sep>cg.add(var.set_page(template_))<block_end><else_stmt><block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep>cg.add(var.set_page(paren))<block_end><return>var<block_end>@automation.register_action("display.page.show_next" DisplayPageShowNextAction maybe_simple_id({cv.Required(CONF_ID):cv.templatable(cv.use_id(DisplayBuffer)) }) )<async_keyword><def_stmt>display_page_show_next_to_code config action_id template_arg args<block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep><return>cg.new_Pvariable(action_id template_arg paren)<block_end>@automation.register_action("display.page.show_previous" DisplayPageShowPrevAction maybe_simple_id({cv.Required(CONF_ID):cv.templatable(cv.use_id(DisplayBuffer)) }) )<async_keyword><def_stmt>display_page_show_previous_to_code config action_id template_arg args<block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep><return>cg.new_Pvariable(action_id template_arg paren)<block_end>@automation.register_condition("display.is_displaying_page" DisplayIsDisplayingPageCondition cv.maybe_simple_value({cv.GenerateID(CONF_ID):cv.use_id(DisplayBuffer) cv.Required(CONF_PAGE_ID):cv.use_id(DisplayPage) } key=CONF_PAGE_ID ) )<async_keyword><def_stmt>display_is_displaying_page_to_code config condition_id template_arg args<block_start>paren=<await>cg.get_variable(config[CONF_ID])<line_sep>page=<await>cg.get_variable(config[CONF_PAGE_ID])<line_sep>var=cg.new_Pvariable(condition_id template_arg paren)<line_sep>cg.add(var.set_page(page))<line_sep><return>var<block_end>@coroutine_with_priority(100.0)<async_keyword><def_stmt>to_code config<block_start>cg.add_global(display_ns.using)<block_end>
<import_stmt>os<import_from_stmt>os.path join dirname abspath isdir<def_stmt>in_tst_dir filename<block_start><return>join(dirname(abspath(__file__)) filename)<block_end><def_stmt>in_tst_output_dir filename<block_start>output_dir=join(dirname(abspath(__file__)) 'output')<if_stmt><not>isdir(output_dir)<block_start>os.mkdir(output_dir 0o755)<block_end><return>join(output_dir filename)<block_end>
""" Code to call the Behance API to construct a dataset. """<line_sep>#import os <import_stmt>sys<import_stmt>requests<import_stmt>bs4<import_stmt>pandas<as>pd<line_sep>#import numpy as np <import_stmt>random<import_stmt>vislab<line_sep>tags=['photo' 'blue' 'car' 'chinese ink' 'colored pencil' 'colors' 'comic' 'fashion illustration' 'graphics' 'infographic' 'landscape' 'vector' 'watercolor']<line_sep>testURL='http://www.behance.net//gallery/Icons/1140561'<line_sep>projectNum=1140561<def_stmt>get_image_url_for_photo_id id<block_start>df=get_photo_df()<line_sep><return>df.ix[id]['imageURL']<block_end><def_stmt>get_image_url_for_illustration_id id<block_start>df=get_illustration_df()<line_sep><return>df.ix[id]['image_url']<block_end><def_stmt>get_photo_df <block_start>df=pd.read_csv(vislab.config['behance_style_repo']+'/data/behanceImages.csv')<line_sep>df=df[df.label<eq>'photo']<line_sep>df=df[df['imageURL']<ne>'http://a2.behance.net/img/site/grey.png']<line_sep>df.index=['behance_photo_{}'.format(x)<for>x df.index]<line_sep><return>df<block_end><def_stmt>get_illustration_df <block_start>""" This DataFame was assembled in the notebooks load_data and processing in the ADobe-private behance_style repo. """<line_sep>df=pd.read_csv(vislab.config['behance_style_repo']+'/data/10k_illustrations_20_tags_3_images.csv' index_col=0)<line_sep><return>df<block_end><def_stmt>get_basic_dataset force=<false><block_start>""" Return DataFrame of image_id -> page_url, artist_slug, artwork_slug. """<line_sep>filename=vislab.config['paths']['shared_data']+'/wikipaintings_basic_info.h5'<line_sep>df=vislab.util.load_or_generate_df(filename fetch_basic_dataset force)<line_sep><return>df<block_end><def_stmt>_getSmallest imageModule<block_start><if_stmt><not>imageModule.has_key('sizes')<block_start><return>imageModule['src']<block_end>sizeList=imageModule['sizes']<line_sep>knownSizes=['max_1240' 'max_1920' 'original']<for_stmt>s knownSizes<block_start><if_stmt>sizeList.has_key(s)<block_start><return>sizeList[s]<block_end><block_end>print(sizeList)<line_sep><raise>Exception<block_end><def_stmt>fetch_single_project_image_URLs_via_API projectNum<block_start>query='http://www.behance.net/v2/projects/'+str(projectNum)+'?api_key='+vislab.config['behanceAPIkey']<line_sep># print('fetching project %d, query: %s'%(projectNum,query)) r=requests.get(query)<line_sep>projectInfo=r.json()['project']<line_sep>imageData=filter(<lambda>x:x['type']<eq>'image' projectInfo['modules'])<line_sep><return>map(<lambda>x:_getSmallest(x) imageData)<block_end><def_stmt>fetch_single_project_image_URLs_via_scraping page_url<block_start>r=requests.get(page_url)<line_sep>soup=bs4.BeautifulSoup(r.text)<line_sep>all_imgs=[]<for_stmt>li soup.select('li.module.image')<block_start>all_imgs<augadd>[img.attrs['src']<for>img li.find_all('img')]<block_end><return>all_imgs<block_end># set maximums to -1 in order to not have a maximum <def_stmt>fetch_basic_dataset maxRequests=10 maxImagesPerProject=2 useAPI=<true><block_start>""" Fetch basic info and page urls from a collection of projects. Results are returned as a DataFrame. 
"""<line_sep>print("Fetching Behance dataset.")<line_sep>projectList=pd.DataFrame.from_csv('behanceProjects.csv' header=-1)<line_sep>APIkey=vislab.config['behanceAPIkey']<line_sep>numRequests=0<line_sep>random.seed(0)# fix the seed so we get the same results each time imageData=[]<for_stmt>index,row projectList.iterrows()<block_start><if_stmt>numRequests%10<eq>0<block_start>sys.stdout.write('Fetching project %d / %d \r'%(numRequests len(projectList.index)))<line_sep>sys.stdout.flush()<block_end>projectNum=row.name<line_sep>URL=row[1]<line_sep>label=row[2]<if_stmt>useAPI<block_start>imageURLs=fetch_single_project_image_URLs_via_API(projectNum)<block_end><else_stmt><block_start>imageURLs=fetch_single_project_image_URLs_via_scraping(URL)<block_end><if_stmt>len(imageURLs)<le>maxImagesPerProject<or>maxImagesPerProject<le>0<block_start>pickedImageURLs=imageURLs<block_end><else_stmt><block_start>pickedImageURLs=random.sample(imageURLs maxImagesPerProject)<block_end><for_stmt>u pickedImageURLs<block_start>imageData.append({'projectNum':projectNum 'projectURL':URL 'label':label 'imageURL':u})<block_end>numRequests=numRequests+1<if_stmt>maxRequests<g>0<and>numRequests<ge>maxRequests<block_start><break><block_end><block_end>df=pd.DataFrame(imageData)<line_sep><return>df<block_end><if_stmt>__name__<eq>'__main__'<block_start>""" Run the scraping with a number of workers taking jobs from a queue. """<line_sep>df=fetch_basic_dataset(maxRequests=-1 maxImagesPerProject=-1 useAPI=<false>)<line_sep>df.to_csv('behanceImages.csv')<line_sep>print(df)<block_end>
<import_stmt>math<import_stmt>cairo<import_stmt>gi<line_sep>gi.require_version('Gtk' '3.0')<import_from_stmt>gi.repository Gtk GLib# nopep8 <class_stmt>Animator(Gtk.DrawingArea)<block_start><def_stmt>__init__ self **properties<block_start>super().__init__(**properties)<line_sep>self.set_size_request(200 80)<line_sep>self.connect("draw" self.do_drawing)<line_sep>GLib.timeout_add(50 self.tick)<block_end><def_stmt>tick self<block_start>self.queue_draw()<line_sep><return><true><block_end><def_stmt>do_drawing self widget cr<block_start>self.draw(cr self.get_allocated_width() self.get_allocated_height())<block_end><def_stmt>draw self ctx width height<block_start><pass><block_end><block_end><class_stmt>ListeningAnimator(Animator)<block_start><def_stmt>__init__ self window **properties<block_start>super().__init__(**properties)<line_sep>self.window=window<line_sep>self.tc=0<block_end><def_stmt>draw self ctx width height<block_start>self.tc<augadd>0.2<line_sep>self.tc<augmod>2<times>math.pi<for_stmt>i range(-4 5)<block_start>ctx.set_source_rgb(0.2 0.5 1)<line_sep>ctx.set_line_width(6)<line_sep>ctx.set_line_cap(cairo.LINE_CAP_ROUND)<if_stmt>i%2<eq>0<block_start>ctx.move_to(width/2+i<times>10 height/2+3-8<times>math.sin(self.tc+i))<line_sep>ctx.line_to(width/2+i<times>10 height/2-3+8<times>math.sin(self.tc+i))<block_end><else_stmt><block_start>ctx.set_source_rgb(0.2 0.7 1)<line_sep>ctx.move_to(width/2+i<times>10 height/2+3-8<times>math.cos(self.tc-i))<line_sep>ctx.line_to(width/2+i<times>10 height/2-3+8<times>math.cos(self.tc-i))<block_end>ctx.stroke()<block_end><block_end><block_end><class_stmt>ThinkingAnimator(Animator)<block_start><def_stmt>__init__ self window **properties<block_start>super().__init__(**properties)<line_sep>self.window=window<line_sep>self.rot=0<line_sep>self.x,self.y=0 0<line_sep>self.rad=20<block_end><def_stmt>draw self ctx width height<block_start>self.x,self.y=width/2 height/2<line_sep>self.rot<augadd>0.2<line_sep>self.rot<augmod>2<times>math.pi<for_stmt>i range(-2 2)<block_start>ctx.set_source_rgb(0.2 0.7 1)<line_sep>ctx.arc(self.x+i<times>20 self.y 8<times>math.cos(self.rot-i/2) 0 2<times>math.pi)<line_sep>ctx.fill()<block_end><block_end><block_end>
<import_from_stmt>collections Counter<import_from_stmt>copy copy<import_stmt>json<import_from_stmt>tqdm tqdm<import_from_stmt>search.search bulk_text_query<import_from_stmt>utils.general chunks<def_stmt>main <block_start><import_stmt>argparse<line_sep>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('split' choices=['train' 'dev'])<line_sep>args=parser.parse_args()<if_stmt>args.split<eq>'train'<block_start>filename='data/hotpotqa/hotpot_train_v1.1.json'<line_sep>outputname='data/hotpotqa/hotpot_train_single_hop.json'<block_end><else_stmt><block_start>filename='data/hotpotqa/hotpot_dev_fullwiki_v1.json'<line_sep>outputname='data/hotpotqa/hotpot_dev_single_hop.json'<block_end>batch_size=64<with_stmt>open(filename)<as>f<block_start>data=json.load(f)<block_end>outputdata=[]<line_sep>processed=0<for_stmt>batch tqdm(chunks(data batch_size) total=(len(data)+batch_size-1)<floordiv>batch_size)<block_start>queries=[x['question']<for>x batch]<line_sep>res=bulk_text_query(queries topn=10 lazy=<false>)<for_stmt>r,d zip(res batch)<block_start>d1=copy(d)<line_sep>context=[item['data_object']<for>item r]<line_sep>context=[(x['title'] x['text'])<for>x context]<line_sep>d1['context']=context<line_sep>outputdata.append(d1)<block_end>processed<augadd>len(batch)<block_end><with_stmt>open(outputname 'w')<as>f<block_start>json.dump(outputdata f)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>numpy<as>np<import_stmt>tigre<import_stmt>tigre.algorithms<as>algs<import_from_stmt>tigre.utilities sample_loader<import_from_stmt>tigre.utilities.Measure_Quality Measure_Quality<import_stmt>tigre.utilities.gpu<as>gpu<import_stmt>matplotlib.pyplot<as>plt<line_sep>### This is just a basic example covering a small subset of TIGRE functionality. # We highly recommend checking the Demos folder, where most, if not all, features of TIGRE are demoed. listGpuNames=gpu.getGpuNames()<if_stmt>len(listGpuNames)<eq>0<block_start>print("Error: No gpu found")<block_end><else_stmt><block_start><for_stmt>id range(len(listGpuNames))<block_start>print("{}: {}".format(id listGpuNames[id]))<block_end><block_end>gpuids=gpu.getGpuIds(listGpuNames[0])<line_sep>print(gpuids)<line_sep># Geometry # geo1 = tigre.geometry(mode='cone', high_resolution=False, default=True) geo=tigre.geometry(mode="cone" nVoxel=np.array([256 256 256]) default=<true>)<line_sep>geo.dDetector=np.array([0.8 0.8])<times>2# size of each pixel (mm) geo.sDetector=geo.dDetector<times>geo.nDetector<line_sep># print(geo) nangles=100<line_sep>angles=np.linspace(0 2<times>np.pi nangles endpoint=<false> dtype=np.float32)<line_sep># Prepare projection data head=sample_loader.load_head_phantom(geo.nVoxel)<line_sep>proj=tigre.Ax(head geo angles gpuids=gpuids)<line_sep># Reconstruct niter=20<line_sep>fdkout=algs.fdk(proj geo angles gpuids=gpuids)<line_sep>ossart=algs.ossart(proj geo angles niter blocksize=20 gpuids=gpuids)<line_sep># Measure Quality # 'RMSE', 'MSSIM', 'SSD', 'UQI' print("RMSE fdk:")<line_sep>print(Measure_Quality(fdkout head ["nRMSE"]))<line_sep>print("RMSE ossart:")<line_sep>print(Measure_Quality(ossart head ["nRMSE"]))<line_sep># Plot fig,axes=plt.subplots(3 2)<line_sep>axes[0 0].set_title("FDK")<line_sep>axes[0 0].imshow(fdkout[geo.nVoxel[0]<floordiv>2])<line_sep>axes[1 0].imshow(fdkout[: geo.nVoxel[1]<floordiv>2 :])<line_sep>axes[2 0].imshow(fdkout[: : geo.nVoxel[2]<floordiv>2])<line_sep>axes[0 1].set_title("OS-SART")<line_sep>axes[0 1].imshow(ossart[geo.nVoxel[0]<floordiv>2])<line_sep>axes[1 1].imshow(ossart[: geo.nVoxel[1]<floordiv>2 :])<line_sep>axes[2 1].imshow(ossart[: : geo.nVoxel[2]<floordiv>2])<line_sep>plt.show()<line_sep># tigre.plotProj(proj) # tigre.plotImg(fdkout)
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services."""<import_stmt>grpc<import_from_stmt>clusterfuzz._internal.protos untrusted_runner_pb2<as>clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2<class_stmt>UntrustedRunnerStub(object)<block_start>"""UntrusterRunner service. """<def_stmt>__init__ self channel<block_start>"""Constructor. Args: channel: A grpc.Channel. """<line_sep>self.GetStatus=channel.unary_unary('/UntrustedRunner/GetStatus' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetStatusRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetStatusResponse.FromString )<line_sep>self.SetupRegularBuild=channel.unary_unary('/UntrustedRunner/SetupRegularBuild' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SetupRegularBuildRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SetupBuildResponse.FromString )<line_sep>self.RunProcess=channel.unary_unary('/UntrustedRunner/RunProcess' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunProcessRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunProcessResponse.FromString )<line_sep>self.RunAndWait=channel.unary_unary('/UntrustedRunner/RunAndWait' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunAndWaitRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunAndWaitResponse.FromString )<line_sep>self.CreateDirectory=channel.unary_unary('/UntrustedRunner/CreateDirectory' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CreateDirectoryRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CreateDirectoryResponse.FromString )<line_sep>self.RemoveDirectory=channel.unary_unary('/UntrustedRunner/RemoveDirectory' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RemoveDirectoryRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RemoveDirectoryResponse.FromString )<line_sep>self.ListFiles=channel.unary_unary('/UntrustedRunner/ListFiles' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ListFilesRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ListFilesResponse.FromString )<line_sep>self.CopyFileTo=channel.stream_unary('/UntrustedRunner/CopyFileTo' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.FileChunk.SerializeToString 
response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CopyFileToResponse.FromString )<line_sep>self.CopyFileFrom=channel.unary_stream('/UntrustedRunner/CopyFileFrom' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CopyFileFromRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.FileChunk.FromString )<line_sep>self.Stat=channel.unary_unary('/UntrustedRunner/Stat' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.StatRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.StatResponse.FromString )<line_sep>self.UpdateEnvironment=channel.unary_unary('/UntrustedRunner/UpdateEnvironment' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateEnvironmentRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateEnvironmentResponse.FromString )<line_sep>self.ResetEnvironment=channel.unary_unary('/UntrustedRunner/ResetEnvironment' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ResetEnvironmentRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ResetEnvironmentResponse.FromString )<line_sep>self.UpdateSource=channel.unary_unary('/UntrustedRunner/UpdateSource' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateSourceRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateSourceResponse.FromString )<line_sep>self.SymbolizeStacktrace=channel.unary_unary('/UntrustedRunner/SymbolizeStacktrace' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SymbolizeStacktraceRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SymbolizeStacktraceResponse.FromString )<line_sep>self.TerminateStaleApplicationInstances=channel.unary_unary('/UntrustedRunner/TerminateStaleApplicationInstances' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.TerminateStaleApplicationInstancesRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.TerminateStaleApplicationInstancesResponse.FromString )<line_sep>self.GetFuzzTargets=channel.unary_unary('/UntrustedRunner/GetFuzzTargets' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetFuzzTargetsRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetFuzzTargetsResponse.FromString )<line_sep>self.PruneCorpus=channel.unary_unary('/UntrustedRunner/PruneCorpus' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.PruneCorpusRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.PruneCorpusResponse.FromString )<line_sep>self.ProcessTestcase=channel.unary_unary('/UntrustedRunner/ProcessTestcase' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ProcessTestcaseRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceResult.FromString )<line_sep>self.EngineFuzz=channel.unary_unary('/UntrustedRunner/EngineFuzz' 
request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineFuzzRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineFuzzResponse.FromString )<line_sep>self.EngineReproduce=channel.unary_unary('/UntrustedRunner/EngineReproduce' request_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceRequest.SerializeToString response_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceResult.FromString )<block_end><block_end><class_stmt>UntrustedRunnerServicer(object)<block_start>"""UntrusterRunner service. """<def_stmt>GetStatus self request context<block_start>"""Get information about the worker. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>SetupRegularBuild self request context<block_start>"""Set up regular build. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>RunProcess self request context<block_start>"""Run command using process_handler.runProcess """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>RunAndWait self request context<block_start>"""Run command using new_process.ProcessRunner """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>CreateDirectory self request context<block_start>"""Create a directory. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>RemoveDirectory self request context<block_start>"""Remove a directory. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>ListFiles self request context<block_start>"""List files in a directory. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>CopyFileTo self request_iterator context<block_start>"""Copy file from host to worker. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>CopyFileFrom self request context<block_start>"""Copy file from worker to host. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>Stat self request context<block_start>"""Call stat() on a path. 
"""<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>UpdateEnvironment self request context<block_start>"""Environment variable changes. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>ResetEnvironment self request context<block_start>"""Reset environment variables. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>UpdateSource self request context<block_start>"""Update source. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>SymbolizeStacktrace self request context<block_start>"""Symbolize a stacktrace. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>TerminateStaleApplicationInstances self request context<block_start>"""Terminate stale application instances. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>GetFuzzTargets self request context<block_start>"""libFuzzer/AFL specific: get list of fuzz targets. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>PruneCorpus self request context<block_start>"""libFuzzer specific: corpus pruning """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>ProcessTestcase self request context<block_start>"""Engine specific: Do testcase minimization or cleanse. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>EngineFuzz self request context<block_start>"""Engine specific: Do fuzzing. """<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><def_stmt>EngineReproduce self request context<block_start>"""Engine specific: Do reproduction. 
"""<line_sep>context.set_code(grpc.StatusCode.UNIMPLEMENTED)<line_sep>context.set_details('Method not implemented!')<line_sep><raise>NotImplementedError('Method not implemented!')<block_end><block_end><def_stmt>add_UntrustedRunnerServicer_to_server servicer server<block_start>rpc_method_handlers={'GetStatus':grpc.unary_unary_rpc_method_handler(servicer.GetStatus request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetStatusRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetStatusResponse.SerializeToString ) 'SetupRegularBuild':grpc.unary_unary_rpc_method_handler(servicer.SetupRegularBuild request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SetupRegularBuildRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SetupBuildResponse.SerializeToString ) 'RunProcess':grpc.unary_unary_rpc_method_handler(servicer.RunProcess request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunProcessRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunProcessResponse.SerializeToString ) 'RunAndWait':grpc.unary_unary_rpc_method_handler(servicer.RunAndWait request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunAndWaitRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunAndWaitResponse.SerializeToString ) 'CreateDirectory':grpc.unary_unary_rpc_method_handler(servicer.CreateDirectory request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CreateDirectoryRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CreateDirectoryResponse.SerializeToString ) 'RemoveDirectory':grpc.unary_unary_rpc_method_handler(servicer.RemoveDirectory request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RemoveDirectoryRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RemoveDirectoryResponse.SerializeToString ) 'ListFiles':grpc.unary_unary_rpc_method_handler(servicer.ListFiles request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ListFilesRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ListFilesResponse.SerializeToString ) 'CopyFileTo':grpc.stream_unary_rpc_method_handler(servicer.CopyFileTo request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.FileChunk.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CopyFileToResponse.SerializeToString ) 'CopyFileFrom':grpc.unary_stream_rpc_method_handler(servicer.CopyFileFrom request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CopyFileFromRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.FileChunk.SerializeToString ) 'Stat':grpc.unary_unary_rpc_method_handler(servicer.Stat request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.StatRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.StatResponse.SerializeToString ) 'UpdateEnvironment':grpc.unary_unary_rpc_method_handler(servicer.UpdateEnvironment 
request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateEnvironmentRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateEnvironmentResponse.SerializeToString ) 'ResetEnvironment':grpc.unary_unary_rpc_method_handler(servicer.ResetEnvironment request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ResetEnvironmentRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ResetEnvironmentResponse.SerializeToString ) 'UpdateSource':grpc.unary_unary_rpc_method_handler(servicer.UpdateSource request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateSourceRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateSourceResponse.SerializeToString ) 'SymbolizeStacktrace':grpc.unary_unary_rpc_method_handler(servicer.SymbolizeStacktrace request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SymbolizeStacktraceRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SymbolizeStacktraceResponse.SerializeToString ) 'TerminateStaleApplicationInstances':grpc.unary_unary_rpc_method_handler(servicer.TerminateStaleApplicationInstances request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.TerminateStaleApplicationInstancesRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.TerminateStaleApplicationInstancesResponse.SerializeToString ) 'GetFuzzTargets':grpc.unary_unary_rpc_method_handler(servicer.GetFuzzTargets request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetFuzzTargetsRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetFuzzTargetsResponse.SerializeToString ) 'PruneCorpus':grpc.unary_unary_rpc_method_handler(servicer.PruneCorpus request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.PruneCorpusRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.PruneCorpusResponse.SerializeToString ) 'ProcessTestcase':grpc.unary_unary_rpc_method_handler(servicer.ProcessTestcase request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ProcessTestcaseRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceResult.SerializeToString ) 'EngineFuzz':grpc.unary_unary_rpc_method_handler(servicer.EngineFuzz request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineFuzzRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineFuzzResponse.SerializeToString ) 'EngineReproduce':grpc.unary_unary_rpc_method_handler(servicer.EngineReproduce request_deserializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceRequest.FromString response_serializer=clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceResult.SerializeToString ) }<line_sep>generic_handler=grpc.method_handlers_generic_handler('UntrustedRunner' rpc_method_handlers)<line_sep>server.add_generic_rpc_handlers((generic_handler ))<block_end># This class is part of an EXPERIMENTAL API. <class_stmt>UntrustedRunner(object)<block_start>"""UntrusterRunner service. 
"""<line_sep>@staticmethod<def_stmt>GetStatus request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/GetStatus' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetStatusRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetStatusResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>SetupRegularBuild request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/SetupRegularBuild' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SetupRegularBuildRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SetupBuildResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>RunProcess request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/RunProcess' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunProcessRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunProcessResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>RunAndWait request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/RunAndWait' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunAndWaitRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RunAndWaitResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>CreateDirectory request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/CreateDirectory' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CreateDirectoryRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CreateDirectoryResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>RemoveDirectory request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/RemoveDirectory' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RemoveDirectoryRequest.SerializeToString 
clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.RemoveDirectoryResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>ListFiles request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/ListFiles' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ListFilesRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ListFilesResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>CopyFileTo request_iterator target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.stream_unary(request_iterator target '/UntrustedRunner/CopyFileTo' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.FileChunk.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CopyFileToResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>CopyFileFrom request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_stream(request target '/UntrustedRunner/CopyFileFrom' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.CopyFileFromRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.FileChunk.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>Stat request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/Stat' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.StatRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.StatResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>UpdateEnvironment request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/UpdateEnvironment' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateEnvironmentRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateEnvironmentResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>ResetEnvironment request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/ResetEnvironment' 
clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ResetEnvironmentRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ResetEnvironmentResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>UpdateSource request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/UpdateSource' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateSourceRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.UpdateSourceResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>SymbolizeStacktrace request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/SymbolizeStacktrace' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SymbolizeStacktraceRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.SymbolizeStacktraceResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>TerminateStaleApplicationInstances request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/TerminateStaleApplicationInstances' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.TerminateStaleApplicationInstancesRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.TerminateStaleApplicationInstancesResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>GetFuzzTargets request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/GetFuzzTargets' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetFuzzTargetsRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.GetFuzzTargetsResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>PruneCorpus request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/PruneCorpus' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.PruneCorpusRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.PruneCorpusResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>ProcessTestcase request target options=() 
channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/ProcessTestcase' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.ProcessTestcaseRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceResult.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>EngineFuzz request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/EngineFuzz' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineFuzzRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineFuzzResponse.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end>@staticmethod<def_stmt>EngineReproduce request target options=() channel_credentials=<none> call_credentials=<none> insecure=<false> compression=<none> wait_for_ready=<none> timeout=<none> metadata=<none><block_start><return>grpc.experimental.unary_unary(request target '/UntrustedRunner/EngineReproduce' clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceRequest.SerializeToString clusterfuzz_dot___internal_dot_protos_dot_untrusted__runner__pb2.EngineReproduceResult.FromString options channel_credentials insecure call_credentials compression wait_for_ready timeout metadata)<block_end><block_end>
"""A wrapper class for conducting multiple experiments, scheduling jobs and saving results. """<import_from_stmt>typing Callable Type Union<import_from_stmt>hypertunity optimisation reports utils<import_from_stmt>hypertunity.domain Domain<import_from_stmt>hypertunity.optimisation Optimiser<import_from_stmt>hypertunity.reports Reporter<import_from_stmt>hypertunity.scheduling Job Scheduler SlurmJob<line_sep>__all__=["Trial"]<line_sep>OptimiserTypes=Union[str Type[Optimiser] Optimiser]<line_sep>ReporterTypes=Union[str Type[Reporter] Reporter]<class_stmt>Trial<block_start>"""High-level API class for running hyperparameter optimisation. This class encapsulates optimiser querying, job building, scheduling and results collection as well as checkpointing and report generation. """<line_sep>@utils.support_american_spelling<def_stmt>__init__ self objective:Union[Callable str] domain:Domain optimiser:OptimiserTypes="bo" reporter:ReporterTypes="table" device:str="local" **kwargs<block_start>"""Initialise the :class:`Trial` experiment manager. Args: objective: :obj:`Callable` or :obj:`str`. The objective function or script to run. domain: :class:`Domain`. The optimisation domain of the objective function. optimiser: :class:`Optimiser` or :obj:`str`. The optimiser method for domain sampling. reporter: :class:`Reporter` or :obj:`str`. The reporting method for the results. device: :obj:`str`. The host device running the evaluations. Can be 'local' or 'slurm'. **kwargs: additional parameters for the optimiser, reporter and scheduler. Keyword Args: timeout: :obj:`float`. The number of seconds to wait for a :class:`Job` instance to finish. Default is 259200 seconds, or approximately 3 days. """<line_sep>self.objective=objective<line_sep>self.domain=domain<line_sep>self.optimiser=self._init_optimiser(optimiser **kwargs)<line_sep>self.reporter=self._init_reporter(reporter **kwargs)<line_sep>self.scheduler=Scheduler<line_sep># 259200 is the number of seconds contained in 3 days self._timeout=kwargs.get("timeout" 259200.0)<line_sep>self._job=self._init_job(device)<block_end><def_stmt>_init_optimiser self optimiser:OptimiserTypes **kwargs<arrow>Optimiser<block_start><if_stmt>isinstance(optimiser str)<block_start>optimiser_class=get_optimiser(optimiser)<block_end><elif_stmt>issubclass(optimiser Optimiser)<block_start>optimiser_class=optimiser<block_end><elif_stmt>isinstance(optimiser Optimiser)<block_start><return>optimiser<block_end><else_stmt><block_start><raise>TypeError("An optimiser must be a either a string, "<concat>"an Optimiser type or an Optimiser instance.")<block_end>opt_kwargs={}<if_stmt>"seed"<in>kwargs<block_start>opt_kwargs["seed"]=kwargs["seed"]<block_end><return>optimiser_class(self.domain **opt_kwargs)<block_end><def_stmt>_init_reporter self reporter:ReporterTypes **kwargs<arrow>Reporter<block_start><if_stmt>isinstance(reporter str)<block_start>reporter_class=get_reporter(reporter)<block_end><elif_stmt>issubclass(reporter Reporter)<block_start>reporter_class=reporter<block_end><elif_stmt>isinstance(reporter Reporter)<block_start><return>reporter<block_end><else_stmt><block_start><raise>TypeError("A reporter must be either a string, "<concat>"a Reporter type or a Reporter instance.")<block_end>rep_kwargs={"metrics":kwargs.get("metrics" ["score"]) "database_path":kwargs.get("database_path" ".")}<if_stmt><not>issubclass(reporter_class reports.Table)<block_start>rep_kwargs["logdir"]=kwargs.get("logdir" "tensorboard/")<block_end><return>reporter_class(self.domain 
**rep_kwargs)<block_end>@staticmethod<def_stmt>_init_job device:str<arrow>Type[Job]<block_start>device=device.lower()<if_stmt>device<eq>"local"<block_start><return>Job<block_end><if_stmt>device<eq>"slurm"<block_start><return>SlurmJob<block_end><raise>ValueError(f"Unknown device {device}. Select one from {{'local', 'slurm'}}.")<block_end><def_stmt>run self n_steps:int n_parallel:int=1 **kwargs<block_start>"""Run the optimisation and objective function evaluation. Args: n_steps: :obj:`int`. The total number of optimisation steps. n_parallel: (optional) :obj:`int`. The number of jobs that can be scheduled at once. **kwargs: additional keyword arguments for the optimisation, supplied to the :py:meth:`run_step` method of the :class:`Optimiser` instance. Keyword Args: batch_size: (optional) :obj:`int`. The number of samples that are suggested at once. Default is 1. minimise: (optional) :obj:`bool`. If the optimiser is :class:`BayesianOptimisation` then this flag tells whether the objective function is being minimised or maximised. Otherwise it has no effect. Default is `False`. """<line_sep>batch_size=kwargs.get("batch_size" 1)<line_sep>n_parallel=min(n_parallel batch_size)<with_stmt>self.scheduler(n_parallel=n_parallel)<as>scheduler<block_start><for_stmt>i range(n_steps)<block_start>samples=self.optimiser.run_step(batch_size=batch_size minimise=kwargs.get("minimise" <false>))<line_sep>jobs=[self._job(task=self.objective args=s.as_dict())<for>s samples]<line_sep>scheduler.dispatch(jobs)<line_sep>evaluations=[r.data<for>r scheduler.collect(n_results=batch_size timeout=self._timeout)]<line_sep>self.optimiser.update(samples evaluations)<for_stmt>s,e,j zip(samples evaluations jobs)<block_start>self.reporter.log((s e) meta={"job_id":j.id})<block_end><block_end><block_end><block_end><block_end><def_stmt>get_optimiser name:str<arrow>Type[Optimiser]<block_start>name=name.lower()<if_stmt>name.startswith(("bayes" "bo"))<block_start><return>optimisation.BayesianOptimisation<block_end><if_stmt>name.startswith("random")<block_start><return>optimisation.RandomSearch<block_end><if_stmt>name.startswith(("grid" "exhaustive"))<block_start><return>optimisation.GridSearch<block_end><raise>ValueError(f"Unknown optimiser {name}. Select one from "<concat>f"{{'bayesian_optimisation', 'random_search', 'grid_search'}}.")<block_end><def_stmt>get_reporter name:str<arrow>Type[Reporter]<block_start>name=name.lower()<if_stmt>name.startswith("table")<block_start><return>reports.Table<block_end><if_stmt>name.startswith(("tensor" "tb"))<block_start><import_from_stmt>hypertunity.reports tensorboard<as>tb<line_sep><return>tb.Tensorboard<block_end><raise>ValueError(f"Unknown reporter {name}. Select one from {{'table', 'tensorboard'}}.")<block_end>
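For orientation, here is a hypothetical usage sketch of the Trial class defined above. The objective function, the Domain contents, and the objective's return value are invented for illustration; only the keyword names (optimiser, reporter, n_steps, batch_size, minimise) come from the signatures and docstrings above, and the exact Domain constructor syntax and import locations are assumptions.

from hypertunity import Domain, Trial  # assumed import locations

def objective(x, y):
    # Hypothetical objective; the expected return format is an assumption.
    return -(x - 0.3) ** 2 - (y - 0.7) ** 2

trial = Trial(objective=objective,
              domain=Domain({"x": [0.0, 1.0], "y": [0.0, 1.0]}),  # assumed syntax
              optimiser="bo",
              reporter="table")
trial.run(n_steps=5, batch_size=2)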
""" generate user goal for collecting new multiwoz data """<import_from_stmt>convlab2.task.multiwoz.goal_generator GoalGenerator<import_from_stmt>convlab2.util.file_util read_zipped_json<import_stmt>random<import_stmt>numpy<as>np<import_stmt>json<import_stmt>datetime<import_from_stmt>pprint pprint<def_stmt>extract_slot_combination_from_goal goal<block_start>domains=['attraction' 'hotel' 'restaurant' 'police' 'hospital' 'taxi' 'train']<line_sep>serialized_goal=[]<for_stmt>domain goal<block_start><if_stmt>domain<in>domains<block_start><for_stmt>scope,content goal[domain].items()<block_start><if_stmt>content# if isinstance(content, dict): # for slot, value in content.items(): # serialized_goal.append("{}-{}-{}-{}".format(domain, scope, slot, value)) # else: # for slot in content: # serialized_goal.append("{}-{}-{}".format(domain, scope, slot)) <block_start><for_stmt>slot content<block_start>serialized_goal.append("{}-{}-{}".format(domain scope slot))<block_end><block_end><block_end><block_end><block_end><return>sorted(serialized_goal)<block_end><def_stmt>test_generate_overlap total_num=1000 seed=42 output_file='goal.json'<block_start>train_data=read_zipped_json('../../../data/multiwoz/train.json.zip' 'train.json')<line_sep>train_serialized_goals=[]<for_stmt>d train_data<block_start>train_serialized_goals.append(extract_slot_combination_from_goal(train_data[d]['goal']))<block_end>test_data=read_zipped_json('../../../data/multiwoz/test.json.zip' 'test.json')<line_sep>test_serialized_goals=[]<for_stmt>d test_data<block_start>test_serialized_goals.append(extract_slot_combination_from_goal(test_data[d]['goal']))<block_end>overlap=0<for_stmt>serialized_goal test_serialized_goals<block_start><if_stmt>serialized_goal<in>train_serialized_goals<block_start>overlap<augadd>1<block_end><block_end>print(len(train_serialized_goals) len(test_serialized_goals) overlap)# 8434 1000 430 random.seed(seed)<line_sep>np.random.seed(seed)<line_sep>goal_generator=GoalGenerator()<line_sep>goals=[]<line_sep>avg_domains=[]<line_sep>serialized_goals=[]<while_stmt>len(goals)<l>total_num<block_start>goal=goal_generator.get_user_goal()<line_sep># pprint(goal) <if_stmt>'police'<in>goal['domain_ordering']<block_start>no_police=list(goal['domain_ordering'])<line_sep>no_police.remove('police')<line_sep>goal['domain_ordering']=tuple(no_police)<del_stmt>goal['police']<block_end><try_stmt><block_start>message=goal_generator.build_message(goal)[1]<block_end><except_stmt><block_start><continue><block_end># print(message) avg_domains.append(len(goal['domain_ordering']))<line_sep>goals.append({"goals":[] "ori_goals":goal "description":message "timestamp":str(datetime.datetime.now()) "ID":len(goals)})<line_sep>serialized_goals.append(extract_slot_combination_from_goal(goal))<if_stmt>len(serialized_goals)<eq>1<block_start>print(serialized_goals)<block_end><block_end>overlap=0<for_stmt>serialized_goal serialized_goals<block_start><if_stmt>serialized_goal<in>train_serialized_goals<block_start>overlap<augadd>1<block_end><block_end>print(len(train_serialized_goals) len(serialized_goals) overlap)<block_end># 8434 1000 199 <def_stmt>generate total_num=1000 seed=42 output_file='goal.json'<block_start>random.seed(seed)<line_sep>np.random.seed(seed)<line_sep>goal_generator=GoalGenerator()<line_sep>goals=[]<line_sep>avg_domains=[]<while_stmt>len(goals)<l>total_num<block_start>goal=goal_generator.get_user_goal()<line_sep># pprint(goal) 
<if_stmt>'police'<in>goal['domain_ordering']<block_start>no_police=list(goal['domain_ordering'])<line_sep>no_police.remove('police')<line_sep>goal['domain_ordering']=tuple(no_police)<del_stmt>goal['police']<block_end><try_stmt><block_start>message=goal_generator.build_message(goal)[1]<block_end><except_stmt><block_start><continue><block_end># print(message) avg_domains.append(len(goal['domain_ordering']))<line_sep>goals.append({"goals":[] "ori_goals":goal "description":message "timestamp":str(datetime.datetime.now()) "ID":len(goals)})<block_end>print('avg domains:' np.mean(avg_domains))# avg domains: 1.846 json.dump(goals open(output_file 'w') indent=4)<block_end><if_stmt>__name__<eq>'__main__'<block_start>generate(output_file='goal20200629.json')<block_end>
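# Illustrative check (the toy goal below is made up, not from the MultiWOZ data):
# extract_slot_combination_from_goal turns every non-empty scope of a whitelisted
# domain into "domain-scope-slot" strings and returns them sorted, which is what
# the overlap statistics above compare.
toy_goal = {
    "domain_ordering": ("restaurant",),  # ignored: not in the domain whitelist
    "restaurant": {
        "info": {"food": "italian", "area": "centre"},
        "reqt": ["phone"],
    },
}
print(extract_slot_combination_from_goal(toy_goal))
# ['restaurant-info-area', 'restaurant-info-food', 'restaurant-reqt-phone']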
# Copyright 2020 Makani Technologies LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Scoring functions relating to the buoy."""<import_from_stmt>makani.lib.python.batch_sim scoring_functions<import_from_stmt>makani.lib.python.h5_utils numpy_utils<import_stmt>numpy<as>np<class_stmt>BuoyWaterLineScoringFunction(scoring_functions.SingleSidedLimitScoringFunction)<block_start>"""Score to evaluate the highest point that the water line reaches."""<def_stmt>__init__ self good_limit bad_limit severity<block_start>super(BuoyWaterLineScoringFunction self).__init__('Buoy Min. Water Line Distance to Threshold' 'm' good_limit bad_limit severity)<assert_stmt>good_limit<g>bad_limit<block_end><def_stmt>GetSystemLabels self<block_start><return>['offshore' 'buoy']<block_end><def_stmt>GetValue self output<block_start><return>output['water_line_min']<block_end><def_stmt>GetOutput self timeseries<block_start><return>{'water_line_min':np.min(timeseries['water_line'])}<block_end><def_stmt>GetTimeSeries self params sim control<block_start>water_line=self._SelectTelemetry(sim control 'water_line')<line_sep><return>{'water_line':water_line}<block_end><block_end><class_stmt>BuoyYawAngleScoringFunction(scoring_functions.SingleSidedLimitScoringFunction)<block_start>"""Score to evaluate the maximum/minimum yaw angle."""<def_stmt>__init__ self good_limit bad_limit severity<block_start>super(BuoyYawAngleScoringFunction self).__init__('Buoy Peak Yaw Angle From Equilibrium' 'deg' good_limit bad_limit severity)<assert_stmt>good_limit<l>bad_limit<block_end><def_stmt>GetSystemLabels self<block_start><return>['offshore' 'buoy']<block_end><def_stmt>GetValue self output<block_start><return>output['peak_buoy_yaw_angle_deg']<block_end><def_stmt>GetOutput self timeseries<block_start>buoy_yaw_angle_from_eq_deg=timeseries['buoy_yaw_angle_from_eq_deg']<line_sep><return>{'peak_buoy_yaw_angle_deg':np.max(np.fabs(buoy_yaw_angle_from_eq_deg))}<block_end><def_stmt>GetTimeSeries self params sim control<block_start>buoy_yaw_angle_from_eq=self._SelectTelemetry(sim control 'buoy_yaw_angle_from_eq')<line_sep><return>{'buoy_yaw_angle_from_eq_deg':np.degrees(buoy_yaw_angle_from_eq)}<block_end><block_end><class_stmt>BuoyVesselOriginAccelScoringFunction(scoring_functions.SingleSidedLimitScoringFunction)<block_start>"""Score to evaluate the maximum acceleration of the vessel frame origin."""<def_stmt>__init__ self good_limit bad_limit severity<block_start>super(BuoyVesselOriginAccelScoringFunction self).__init__('Buoy Vessel Origin Acceleration' 'g' good_limit bad_limit severity)<assert_stmt>good_limit<l>bad_limit<block_end><def_stmt>GetSystemLabels self<block_start><return>['offshore' 'buoy']<block_end><def_stmt>GetValue self output<block_start><return>output['peak_buoy_accel_norm_gs']<block_end><def_stmt>GetOutput self timeseries<block_start>buoy_accel_norm_gs=timeseries['buoy_accel_norm_gs']<line_sep><return>{'peak_buoy_accel_norm_gs':np.max(buoy_accel_norm_gs)}<block_end><def_stmt>GetTimeSeries self params sim 
control<block_start>buoy_accel_g=self._SelectTelemetry(sim control 'buoy_accel_g')<try_stmt><block_start>buoy_accel_g_norm=np.sum(np.abs(numpy_utils.Vec3ToArray(buoy_accel_g))<power>2 axis=-1)<power>(1./2)<block_end><except_stmt>(TypeError ValueError)<block_start>buoy_accel_g_norm=np.array([float('nan')])<block_end><return>{'buoy_accel_norm_gs':buoy_accel_g_norm/9.81}<block_end><block_end>
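# Minimal sketch (not from the Makani sources) of how one of the scoring classes
# above is exercised: GetOutput reduces a timeseries to a scalar and GetValue
# reads it back for limit checking. The severity value is an assumption, and the
# scoring_functions base class is assumed to need no further setup.
import numpy as np

score_fn = BuoyWaterLineScoringFunction(good_limit=2.0, bad_limit=0.5, severity=3)
output = score_fn.GetOutput({'water_line': np.array([3.1, 2.7, 1.9, 2.4])})
print(score_fn.GetValue(output))  # 1.9, the lowest water-line distance seen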
"""Used to test `get_type_hints()` on a cross-module inherited `TypedDict` class This script uses future annotations to postpone a type that won't be available on the module inheriting from to `Foo`. The subclass in the other module should look something like this: class Bar(_typed_dict_helper.Foo, total=False): b: int """<import_from_future_stmt> annotations<import_from_stmt>typing Optional TypedDict<line_sep>OptionalIntType=Optional[int]<class_stmt>Foo(TypedDict)<block_start>a:OptionalIntType<block_end>
# Copyright (c) 2009-2021 The Regents of the University of Michigan # This file is part of the HOOMD-blue project, released under the BSD 3-Clause License. R""" Metal pair potentials. """<import_from_stmt>hoomd.md force<import_from_stmt>hoomd.md nlist<as>nl# to avoid naming conflicts <import_stmt>hoomd<import_from_stmt>hoomd _hoomd<import_from_stmt>hoomd.md _md<import_from_stmt>hoomd.metal _metal<import_stmt>math<import_stmt>sys<class_stmt>eam(force._force)<block_start>R""" EAM pair potential. Args: file (str): File name with potential tables in Alloy or FS format type (str): Type of file potential ('Alloy', 'FS') nlist (:py:mod:`hoomd.md.nlist`): Neighbor list (default of None automatically creates a global cell-list based neighbor list) :py:class:`eam` specifies that a EAM (embedded atom method) pair potential should be applied between every non-excluded particle pair in the simulation. No coefficients need to be set for :py:class:`eam`. All specifications, including the cutoff radius, form of the potential, etc. are read in from the specified file. Particle type names must match those referenced in the EAM potential file. Particle mass (in atomic mass) **must** be set in the input script, users are allowed to set different mass values other than those in the potential file. Two file formats are supported: *Alloy* and *FS*. They are described in LAMMPS documentation (commands eam/alloy and eam/fs) here: http://lammps.sandia.gov/doc/pair_eam.html and are also described here: http://enpub.fulton.asu.edu/cms/potentials/submain/format.htm .. attention:: EAM is **NOT** supported in MPI parallel simulations. Example:: nl = nlist.cell() eam = pair.eam(file='name.eam.fs', type='FS', nlist=nl) eam = pair.eam(file='name.eam.alloy', type='Alloy', nlist=nl) """<def_stmt>__init__ self file type nlist# Error out in MPI simulations <block_start><if_stmt>(hoomd.version.mpi_enabled)<block_start><if_stmt>hoomd.context.current.system_definition.getParticleData().getDomainDecomposition()<block_start>hoomd.context.current.device.cpp_msg.error("pair.eam is not supported in multi-processor simulations.\n\n")<line_sep><raise>RuntimeError("Error setting up pair potential.")<block_end><block_end># initialize the base class force._force.__init__(self)<line_sep># Translate type <if_stmt>(type<eq>'Alloy')<block_start>type_of_file=0<block_end><elif_stmt>(type<eq>'FS')<block_start>type_of_file=1<block_end><else_stmt><block_start><raise>RuntimeError('Unknown EAM input file type')<block_end># create the c++ mirror class <if_stmt><not>hoomd.context.current.device.cpp_exec_conf.isCUDAEnabled()<block_start>self.cpp_force=_metal.EAMForceCompute(hoomd.context.current.system_definition file type_of_file)<block_end><else_stmt><block_start>self.cpp_force=_metal.EAMForceComputeGPU(hoomd.context.current.system_definition file type_of_file)<block_end>#After load EAMForceCompute we know r_cut from EAM potential`s file. We need update neighbor list. self.r_cut_new=self.cpp_force.get_r_cut()<line_sep>self.nlist=nlist<line_sep>self.nlist.subscribe(<lambda>:self.get_rcut())<line_sep>self.nlist.update_rcut()<line_sep>#Load neighbor list to compute. 
self.cpp_force.set_neighbor_list(self.nlist.cpp_nlist)<if_stmt>hoomd.context.current.device.cpp_exec_conf.isCUDAEnabled()<block_start>self.nlist.cpp_nlist.setStorageMode(_md.NeighborList.storageMode.full)<block_end>hoomd.context.current.device.cpp_msg.notice(2 "Set r_cut = "+str(self.r_cut_new)+" from potential`s file '"+str(file)+"'.\n")<line_sep>hoomd.context.current.system.addCompute(self.cpp_force self.force_name)<line_sep>self.pair_coeff=hoomd.md.pair.coeff()<block_end><def_stmt>get_rcut self# go through the list of only the active particle types in the simulation <block_start>ntypes=hoomd.context.current.system_definition.getParticleData().getNTypes()<line_sep>type_list=[]<for_stmt>i range(0 ntypes)<block_start>type_list.append(hoomd.context.current.system_definition.getParticleData().getNameByType(i))<block_end># update the rcut by pair type r_cut_dict=nl.rcut()<for_stmt>i range(0 ntypes)<block_start><for_stmt>j range(i ntypes)# get the r_cut value <block_start>r_cut_dict.set_pair(type_list[i] type_list[j] self.r_cut_new)<block_end><block_end><return>r_cut_dict<block_end><def_stmt>update_coeffs self# check that the pair coefficients are valid <block_start><pass><block_end><block_end>
'''Autogenerated by xml_generate script, do not edit!'''<import_from_stmt>OpenGL platform<as>_p arrays<line_sep># Code generation uses this <import_from_stmt>OpenGL.raw.GLES2 _types<as>_cs<line_sep># End users want this... <import_from_stmt>OpenGL.raw.GLES2._types *<import_from_stmt>OpenGL.raw.GLES2 _errors<import_from_stmt>OpenGL.constant Constant<as>_C<import_stmt>ctypes<line_sep>_EXTENSION_NAME='GLES2_OES_geometry_shader'<def_stmt>_f function<block_start><return>_p.createFunction(function _p.PLATFORM.GLES2 'GLES2_OES_geometry_shader' error_checker=_errors._error_checker)<block_end>GL_FIRST_VERTEX_CONVENTION_OES=_C('GL_FIRST_VERTEX_CONVENTION_OES' 0x8E4D)<line_sep>GL_FRAMEBUFFER_ATTACHMENT_LAYERED_OES=_C('GL_FRAMEBUFFER_ATTACHMENT_LAYERED_OES' 0x8DA7)<line_sep>GL_FRAMEBUFFER_DEFAULT_LAYERS_OES=_C('GL_FRAMEBUFFER_DEFAULT_LAYERS_OES' 0x9312)<line_sep>GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_OES=_C('GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_OES' 0x8DA8)<line_sep>GL_GEOMETRY_LINKED_INPUT_TYPE_OES=_C('GL_GEOMETRY_LINKED_INPUT_TYPE_OES' 0x8917)<line_sep>GL_GEOMETRY_LINKED_OUTPUT_TYPE_OES=_C('GL_GEOMETRY_LINKED_OUTPUT_TYPE_OES' 0x8918)<line_sep>GL_GEOMETRY_LINKED_VERTICES_OUT_OES=_C('GL_GEOMETRY_LINKED_VERTICES_OUT_OES' 0x8916)<line_sep>GL_GEOMETRY_SHADER_BIT_OES=_C('GL_GEOMETRY_SHADER_BIT_OES' 0x00000004)<line_sep>GL_GEOMETRY_SHADER_INVOCATIONS_OES=_C('GL_GEOMETRY_SHADER_INVOCATIONS_OES' 0x887F)<line_sep>GL_GEOMETRY_SHADER_OES=_C('GL_GEOMETRY_SHADER_OES' 0x8DD9)<line_sep>GL_LAST_VERTEX_CONVENTION_OES=_C('GL_LAST_VERTEX_CONVENTION_OES' 0x8E4E)<line_sep>GL_LAYER_PROVOKING_VERTEX_OES=_C('GL_LAYER_PROVOKING_VERTEX_OES' 0x825E)<line_sep>GL_LINES_ADJACENCY_OES=_C('GL_LINES_ADJACENCY_OES' 0x000A)<line_sep>GL_LINE_STRIP_ADJACENCY_OES=_C('GL_LINE_STRIP_ADJACENCY_OES' 0x000B)<line_sep>GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_OES=_C('GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_OES' 0x8A32)<line_sep>GL_MAX_FRAMEBUFFER_LAYERS_OES=_C('GL_MAX_FRAMEBUFFER_LAYERS_OES' 0x9317)<line_sep>GL_MAX_GEOMETRY_ATOMIC_COUNTERS_OES=_C('GL_MAX_GEOMETRY_ATOMIC_COUNTERS_OES' 0x92D5)<line_sep>GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_OES=_C('GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_OES' 0x92CF)<line_sep>GL_MAX_GEOMETRY_IMAGE_UNIFORMS_OES=_C('GL_MAX_GEOMETRY_IMAGE_UNIFORMS_OES' 0x90CD)<line_sep>GL_MAX_GEOMETRY_INPUT_COMPONENTS_OES=_C('GL_MAX_GEOMETRY_INPUT_COMPONENTS_OES' 0x9123)<line_sep>GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_OES=_C('GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_OES' 0x9124)<line_sep>GL_MAX_GEOMETRY_OUTPUT_VERTICES_OES=_C('GL_MAX_GEOMETRY_OUTPUT_VERTICES_OES' 0x8DE0)<line_sep>GL_MAX_GEOMETRY_SHADER_INVOCATIONS_OES=_C('GL_MAX_GEOMETRY_SHADER_INVOCATIONS_OES' 0x8E5A)<line_sep>GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_OES=_C('GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_OES' 0x90D7)<line_sep>GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_OES=_C('GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_OES' 0x8C29)<line_sep>GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_OES=_C('GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_OES' 0x8DE1)<line_sep>GL_MAX_GEOMETRY_UNIFORM_BLOCKS_OES=_C('GL_MAX_GEOMETRY_UNIFORM_BLOCKS_OES' 0x8A2C)<line_sep>GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_OES=_C('GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_OES' 0x8DDF)<line_sep>GL_PRIMITIVES_GENERATED_OES=_C('GL_PRIMITIVES_GENERATED_OES' 0x8C87)<line_sep>GL_REFERENCED_BY_GEOMETRY_SHADER_OES=_C('GL_REFERENCED_BY_GEOMETRY_SHADER_OES' 0x9309)<line_sep>GL_TRIANGLES_ADJACENCY_OES=_C('GL_TRIANGLES_ADJACENCY_OES' 0x000C)<line_sep>GL_TRIANGLE_STRIP_ADJACENCY_OES=_C('GL_TRIANGLE_STRIP_ADJACENCY_OES' 
0x000D)<line_sep>GL_UNDEFINED_VERTEX_OES=_C('GL_UNDEFINED_VERTEX_OES' 0x8260)<line_sep>@_f@_p.types(<none> _cs.GLenum _cs.GLenum _cs.GLuint _cs.GLint)<def_stmt>glFramebufferTextureOES target attachment texture level<block_start><pass><block_end>
# Copyright (c) 2018, 2019, Oracle and/or its affiliates. # This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See LICENSE.TXT for details. <import_stmt>pytest<import_from_stmt>nose.plugins.skip SkipTest<import_stmt>logging<import_from_stmt>ansible.modules.cloud.oracle oci_autonomous_data_warehouse<import_from_stmt>ansible.module_utils.oracle oci_utils oci_db_utils<import_stmt>tempfile<import_stmt>os<try_stmt><block_start><import_stmt>oci<import_from_stmt>oci.util to_dict<import_from_stmt>oci.database.models AutonomousDataWarehouse<import_from_stmt>oci.exceptions ServiceError ClientError<block_end><except_stmt>ImportError<block_start><raise>SkipTest("test_oci_autonomous_data_warehouse.py requires `oci` module")<block_end><class_stmt>FakeModule(object)<block_start><def_stmt>__init__ self **kwargs<block_start>self.params=kwargs<block_end><def_stmt>fail_json self *args **kwargs<block_start>self.exit_args=args<line_sep>self.exit_kwargs=kwargs<line_sep><raise>Exception(kwargs["msg"])<block_end><def_stmt>exit_json self *args **kwargs<block_start>self.exit_args=args<line_sep>self.exit_kwargs=kwargs<block_end><block_end>@pytest.fixture()<def_stmt>db_client mocker<block_start>mock_db_client=mocker.patch("oci.database.database_client.DatabaseClient")<line_sep><return>mock_db_client.return_value<block_end>@pytest.fixture()<def_stmt>check_and_create_resource_patch mocker<block_start><return>mocker.patch.object(oci_utils "check_and_create_resource")<block_end>@pytest.fixture()<def_stmt>update_autonomous_data_warehouse_patch mocker<block_start><return>mocker.patch.object(oci_autonomous_data_warehouse "update_autonomous_data_warehouse")<block_end>@pytest.fixture()<def_stmt>check_and_update_resource_patch mocker<block_start><return>mocker.patch.object(oci_utils "check_and_update_resource")<block_end>@pytest.fixture()<def_stmt>create_and_wait_patch mocker<block_start><return>mocker.patch.object(oci_utils "create_and_wait")<block_end>@pytest.fixture()<def_stmt>get_existing_resource_patch mocker<block_start><return>mocker.patch.object(oci_utils "get_existing_resource")<block_end>@pytest.fixture()<def_stmt>delete_and_wait_patch mocker<block_start><return>mocker.patch.object(oci_utils "delete_and_wait")<block_end>@pytest.fixture()<def_stmt>execute_function_and_wait_patch mocker<block_start><return>mocker.patch.object(oci_db_utils "execute_function_and_wait")<block_end>@pytest.fixture()<def_stmt>call_with_backoff_patch mocker<block_start><return>mocker.patch.object(oci_utils "call_with_backoff")<block_end>@pytest.fixture()<def_stmt>write_stream_to_file_patch mocker<block_start><return>mocker.patch.object(oci_db_utils "write_stream_to_file")<block_end><def_stmt>setUpModule <block_start>logging.basicConfig(filename="/tmp/oci_ansible_module.log" filemode="a" level=logging.INFO)<line_sep>oci_autonomous_data_warehouse.set_logger(logging)<block_end><def_stmt>test_create_or_update_autonomous_data_warehouse_create db_client check_and_create_resource_patch<block_start>module=get_module()<line_sep>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>check_and_create_resource_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.create_or_update_autonomous_data_warehouse(db_client 
module)<assert_stmt>(result["autonomous_data_warehouse"]["display_name"]<is>autonomous_data_warehouse.display_name)<block_end><def_stmt>test_create_or_update_autonomous_data_warehouse_update db_client update_autonomous_data_warehouse_patch<block_start>module=get_module(dict({"autonomous_data_warehouse_id":"ocid1.autonomous_data_warehouse.aaa"}))<line_sep>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>update_autonomous_data_warehouse_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.create_or_update_autonomous_data_warehouse(db_client module)<assert_stmt>(result["autonomous_data_warehouse"]["display_name"]<is>autonomous_data_warehouse.display_name)<block_end><def_stmt>test_create_or_update_autonomous_data_warehousee_client_error db_client check_and_create_resource_patch<block_start>error_message="databse attribute has no value"<line_sep>module=get_module()<line_sep>check_and_create_resource_patch.side_effect=ClientError(Exception(error_message))<try_stmt><block_start>oci_autonomous_data_warehouse.create_or_update_autonomous_data_warehouse(db_client module)<block_end><except_stmt>Exception<as>ex<block_start><assert_stmt>error_message<in>ex.args[0]<block_end><block_end><def_stmt>test_create_or_update_autonomous_data_warehousee_service_error db_client check_and_create_resource_patch<block_start>error_message="Internal Server Error"<line_sep>module=get_module()<line_sep>check_and_create_resource_patch.side_effect=ServiceError(499 "InternalServerError" dict() error_message)<try_stmt><block_start>oci_autonomous_data_warehouse.create_or_update_autonomous_data_warehouse(db_client module)<block_end><except_stmt>Exception<as>ex<block_start><assert_stmt>error_message<in>ex.args[0]<block_end><block_end><def_stmt>test_create_autonomous_data_warehouse db_client create_and_wait_patch<block_start>module=get_module()<line_sep>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>create_and_wait_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.create_autonomous_data_warehouse(db_client module)<assert_stmt>(result["autonomous_data_warehouse"]["display_name"]<is>autonomous_data_warehouse.display_name)<block_end><def_stmt>test_update_autonomous_data_warehouse_cpu_core_count db_client check_and_update_resource_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>autonomous_data_warehouse.cpu_core_count=4<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict({"autonomous_data_warehouse_id":"ocid1.autonomousdbwarehouse.aaa"}))<line_sep>check_and_update_resource_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.update_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_update_autonomous_data_warehouse_freeform_tags db_client check_and_update_resource_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict(freeform_tags=dict(system_type="oracledb") autonomous_data_warehouse_id="ocid1.autonomousdbwarehouse.aaa" ))<line_sep>check_and_update_resource_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) 
"changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.update_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_update_autonomous_data_warehouse_defined_tags db_client check_and_update_resource_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict(defined_tags=dict(system_strength=dict(shape="medium")) autonomous_data_warehouse_id="ocid1.autonomousdbwarehouse.aaa" ))<line_sep>check_and_update_resource_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.update_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_delete_db_system db_client delete_and_wait_patch<block_start>module=get_module(dict(autonomous_data_warehouse_id="ocid1.autonomousdatabase.aaa"))<line_sep>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>delete_and_wait_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.delete_autonomous_data_warehouse(db_client module)<assert_stmt>(result["autonomous_data_warehouse"]["display_name"]<is>autonomous_data_warehouse.display_name)<block_end><def_stmt>test_restore_autonomous_data_warehouse db_client execute_function_and_wait_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>module=get_module()<line_sep>execute_function_and_wait_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.restore_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_start_or_stop_autonomous_data_warehouse_start db_client get_existing_resource_patch execute_function_and_wait_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>autonomous_data_warehouse.lifecycle_state="STOPPED"<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict(state="start"))<line_sep>execute_function_and_wait_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.start_or_stop_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_start_or_stop_autonomous_data_warehouse_start_idempotent db_client get_existing_resource_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>autonomous_data_warehouse.lifecycle_state="AVAILABLE"<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict(state="start"))<line_sep>result=oci_autonomous_data_warehouse.start_or_stop_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><false><block_end><def_stmt>test_start_or_stop_autonomous_data_warehouse_stop db_client get_existing_resource_patch 
execute_function_and_wait_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>autonomous_data_warehouse.lifecycle_state="AVAILABLE"<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict(state="stop"))<line_sep>execute_function_and_wait_patch.return_value={"autonomous_data_warehouse":to_dict(autonomous_data_warehouse) "changed":<true> }<line_sep>result=oci_autonomous_data_warehouse.start_or_stop_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_start_or_stop_autonomous_data_warehouse_stop_idempotent db_client get_existing_resource_patch<block_start>autonomous_data_warehouse=get_autonomous_data_warehouse()<line_sep>autonomous_data_warehouse.lifecycle_state="STOPPED"<line_sep>get_existing_resource_patch.return_value=autonomous_data_warehouse<line_sep>module=get_module(dict(state="stop"))<line_sep>result=oci_autonomous_data_warehouse.start_or_stop_autonomous_data_warehouse(db_client module)<assert_stmt>result["changed"]<is><false><block_end><def_stmt>test_generate_wallet db_client call_with_backoff_patch write_stream_to_file_patch<block_start>call_with_backoff_patch.return_value=get_response(200 <none> "test" <none>)<line_sep>write_stream_to_file_patch.return_value=<true><line_sep>module=get_module(dict(password="<PASSWORD>" wallet_file="test_wallet_file"))<line_sep>result=oci_autonomous_data_warehouse.generate_wallet(db_client module)<assert_stmt>result["changed"]<is><true><block_end><def_stmt>test_generate_wallet_no_wallet_file_defined db_client<block_start>error_message="Wallet file must be declared"<line_sep>module=get_module(dict(password="<PASSWORD>"))<try_stmt><block_start>oci_autonomous_data_warehouse.generate_wallet(db_client module)<block_end><except_stmt>Exception<as>ex<block_start><assert_stmt>error_message<in>ex.args[0]<block_end><block_end><def_stmt>test_generate_wallet_empty_wallet_file_defined db_client<block_start>error_message="Wallet file must be declared"<line_sep>module=get_module(dict(password="<PASSWORD>" wallet_file=" "))<try_stmt><block_start>oci_autonomous_data_warehouse.generate_wallet(db_client module)<block_end><except_stmt>Exception<as>ex<block_start><assert_stmt>error_message<in>ex.args[0]<block_end><block_end><def_stmt>test_generate_wallet_service_error db_client call_with_backoff_patch<block_start>error_message="Internal Server Error"<line_sep>module=get_module(dict(password="<PASSWORD>" wallet_file="test_wallet_file"))<line_sep>call_with_backoff_patch.side_effect=ServiceError(499 "InternalServerError" dict() error_message)<try_stmt><block_start>oci_autonomous_data_warehouse.generate_wallet(db_client module)<block_end><except_stmt>Exception<as>ex<block_start><assert_stmt>error_message<in>ex.args[0]<block_end><block_end><def_stmt>test_generate_wallet_client_error db_client call_with_backoff_patch<block_start>error_message="Wallet file not valid"<line_sep>module=get_module(dict(password="<PASSWORD>" wallet_file="test_wallet_file"))<line_sep>call_with_backoff_patch.side_effect=ClientError(Exception(error_message))<try_stmt><block_start>oci_autonomous_data_warehouse.generate_wallet(db_client module)<block_end><except_stmt>Exception<as>ex<block_start><assert_stmt>error_message<in>ex.args[0]<block_end><block_end><def_stmt>get_autonomous_data_warehouse 
<block_start>autonomous_data_warehouse=AutonomousDataWarehouse()<line_sep>autonomous_data_warehouse.display_name="ansible_autonomous_data_warehousee"<line_sep>autonomous_data_warehouse.freeform_tags={"system_type":"exadata"}<line_sep>autonomous_data_warehouse.defined_tags={"system_strength":{"shape":"small"}}<line_sep><return>autonomous_data_warehouse<block_end><def_stmt>get_response status header data request<block_start><return>oci.Response(status header data request)<block_end><def_stmt>get_module additional_properties=<none><block_start>params={"compartment_id":"ocid1.compartment.oc1..xxxxxEXAMPLExxxxx..qndq" "admin_password":"<PASSWORD>" "data_storage_size_in_tbs":1 "cpu_core_count":1 "db_name":"ansibledbwarehouse" "display_name":"ansibleautodbwarehouse" "license_model":"LICENSE_INCLUDED" "wait":<false> "freeform_tags":{"db_type":"test"} }<if_stmt>additional_properties<block_start>params.update(additional_properties)<block_end>module=FakeModule(**params)<line_sep><return>module<block_end>
# coding=UTF-8 #------------------------------------------------------------------------------ # Copyright (c) 2007-2021, Acoular Development Team. #------------------------------------------------------------------------------ # separate file to find out about version without importing the acoular lib __author__="Acoular Development Team"<line_sep>__date__="5 May 2021"<line_sep>__version__="21.05"<line_sep>
<import_from_stmt>...protocols socks4 socks5<import_from_stmt>...utils run_parser_curio<import_from_stmt>..base.server ProxyBase<class_stmt>SocksProxy(ProxyBase)<block_start>proto="SOCKS"<def_stmt>__init__ self bind_addr auth=<none> via=<none> plugin=<none> **kwargs<block_start>self.bind_addr=bind_addr<line_sep>self.auth=auth<line_sep>self.via=via<line_sep>self.plugin=plugin<line_sep>self.kwargs=kwargs<block_end><async_keyword><def_stmt>_run self<block_start>socks5_parser=socks5.server.parser(self.auth)<line_sep>request=<await>run_parser_curio(socks5_parser self.client)<line_sep>self.target_addr=(request.addr.host request.addr.port)<line_sep>via_client=<await>self.connect_server(self.target_addr)<line_sep># await self.client.sendall(socks5.resp()) socks5_parser.send_event(0)<line_sep><await>run_parser_curio(socks5_parser self.client)<async_keyword><with_stmt>via_client<block_start>redundant=socks5_parser.readall()<if_stmt>redundant<block_start><await>via_client.sendall(redundant)<block_end><await>self.relay(via_client)<block_end><block_end><block_end><class_stmt>Socks4Proxy(ProxyBase)<block_start>proto="SOCKS4"<def_stmt>__init__ self bind_addr auth=<none> via=<none> plugin=<none> **kwargs<block_start>self.bind_addr=bind_addr<line_sep>self.auth=auth<line_sep>self.via=via<line_sep>self.plugin=plugin<line_sep>self.kwargs=kwargs<block_end><async_keyword><def_stmt>_run self<block_start>socks4_parser=socks4.server.parser()<line_sep>self.target_addr=<await>run_parser_curio(socks4_parser self.client)<line_sep>via_client=<await>self.connect_server(self.target_addr)<line_sep>socks4_parser.send_event(0x5A)<line_sep><await>run_parser_curio(socks4_parser self.client)<async_keyword><with_stmt>via_client<block_start>redundant=socks4_parser.readall()<if_stmt>redundant<block_start><await>via_client.sendall(redundant)<block_end><await>self.relay(via_client)<block_end><block_end><block_end>
# Copyright (C) 2017 Beijing Didi Infinity Technology and Development Co.,Ltd. # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== ''' Base Speech Task'''<import_from_stmt>delta utils<import_from_stmt>delta.data utils<as>data_utils<import_from_stmt>delta.data.task.base_task WavSpeechTask<line_sep>#pylint: disable=abstract-method <class_stmt>SpeechTask(WavSpeechTask)<block_start>''' base class for speech task'''<def_stmt>__init__ self config mode<block_start>super().__init__(config)<assert_stmt>mode<in>(utils.TRAIN utils.EVAL utils.INFER)<line_sep>self._mode=mode<block_end>@property<def_stmt>mode self<block_start><return>self._mode<block_end>#pylint: disable=arguments-differ <def_stmt>input_fn self mode batch_size num_epoch=<none><block_start>''' estimator input_fn'''<line_sep><return>data_utils.input_fn(self.dataset mode batch_size num_epoch)<block_end><block_end>
""" Dataset for distiller Author: <NAME> (https://github.com/vectominist) """<import_stmt>os<import_stmt>random<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_stmt>torch<import_from_stmt>torch.nn.utils.rnn pad_sequence<import_from_stmt>torch.utils.data.dataset Dataset<import_stmt>torchaudio<line_sep>HALF_BATCHSIZE_TIME=99999<class_stmt>WaveDataset(Dataset)<block_start>"""Waveform dataset for Disiller"""<def_stmt>__init__ self task_config bucket_size file_path sets max_timestep=0 libri_root=<none> **kwargs<block_start>super().__init__()<line_sep>self.task_config=task_config<line_sep>self.libri_root=libri_root<line_sep>self.sample_length=task_config["sequence_length"]<if_stmt>self.sample_length<g>0<block_start>print("[Dataset] - Sampling random segments for training, sample length:" self.sample_length )<block_end># Read file self.root=file_path<line_sep>tables=[pd.read_csv(os.path.join(file_path s+".csv"))<for>s sets]<line_sep>self.table=pd.concat(tables ignore_index=<true>).sort_values(by=["length"] ascending=<false>)<line_sep>print("[Dataset] - Training data from these sets:" str(sets))<line_sep># Drop seqs that are too long <if_stmt>max_timestep<g>0<block_start>self.table=self.table[self.table.length<l>max_timestep]<block_end># Drop seqs that are too short <if_stmt>max_timestep<l>0<block_start>self.table=self.table[self.table.length<g>(-1<times>max_timestep)]<block_end>X=self.table["file_path"].tolist()<line_sep>X_lens=self.table["length"].tolist()<line_sep>self.num_samples=len(X)<line_sep>print("[Dataset] - Number of individual training instances:" self.num_samples)<line_sep># Use bucketing to allow different batch size at run time self.X=[]<line_sep>batch_x,batch_len=[] []<for_stmt>x,x_len zip(X X_lens)<block_start>batch_x.append(x)<line_sep>batch_len.append(x_len)<line_sep># Fill in batch_x until batch is full <if_stmt>len(batch_x)<eq>bucket_size# Half the batch size if seq too long <block_start><if_stmt>((bucket_size<ge>2)<and>(max(batch_len)<g>HALF_BATCHSIZE_TIME)<and>self.sample_length<eq>0)<block_start>self.X.append(batch_x[:bucket_size<floordiv>2])<line_sep>self.X.append(batch_x[bucket_size<floordiv>2:])<block_end><else_stmt><block_start>self.X.append(batch_x)<block_end>batch_x,batch_len=[] []<block_end><block_end># Gather the last batch <if_stmt>len(batch_x)<g>1<block_start>self.X.append(batch_x)<block_end><block_end><def_stmt>_sample self x<block_start><if_stmt>self.sample_length<le>0<block_start><return>x<block_end><if_stmt>len(x)<l>self.sample_length<block_start><return>x<block_end>idx=random.randint(0 len(x)-self.sample_length)<line_sep><return>x[idx:idx+self.sample_length]<block_end><def_stmt>__len__ self<block_start><return>len(self.X)<block_end><def_stmt>collate_fn self items<block_start>items=items[0]# hack bucketing <assert_stmt>(len(items)<eq>4) "__getitem__ should return (wave_input, wave_orig, wave_len, pad_mask)"<line_sep><return>items<block_end><block_end><class_stmt>OnlineWaveDataset(WaveDataset)<block_start>"""Online waveform dataset"""<def_stmt>__init__ self task_config bucket_size file_path sets max_timestep=0 libri_root=<none> target_level=-25 **kwargs<block_start>super().__init__(task_config bucket_size file_path sets max_timestep libri_root **kwargs)<line_sep>self.target_level=target_level<block_end><def_stmt>_load_feat self feat_path<block_start><if_stmt>self.libri_root<is><none><block_start><return>torch.FloatTensor(np.load(os.path.join(self.root feat_path)))<block_end>wav,_=torchaudio.load(os.path.join(self.libri_root 
feat_path))<line_sep><return>wav.squeeze()<block_end># (seq_len) <def_stmt>__getitem__ self index# Load acoustic feature and pad <block_start>x_batch=[self._sample(self._load_feat(x_file))<for>x_file self.X[index]]<line_sep>x_lens=[len(x)<for>x x_batch]<line_sep>x_lens=torch.LongTensor(x_lens)<line_sep>x_pad_batch=pad_sequence(x_batch batch_first=<true>)<line_sep>pad_mask=torch.ones(x_pad_batch.shape)# (batch_size, seq_len) # zero vectors for padding dimension <for_stmt>idx range(x_pad_batch.shape[0])<block_start>pad_mask[idx x_lens[idx]:]=0<block_end><return>[x_pad_batch x_batch x_lens pad_mask]<block_end><block_end>
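# Illustrative wiring (not from the source): each __getitem__ already returns a
# whole bucket, so the DataLoader uses batch_size=1 and collate_fn just unwraps
# it. The csv directory, split name and LibriSpeech root below are assumptions.
from torch.utils.data import DataLoader

task_config = {"sequence_length": 16000}  # ~1 s crops at an assumed 16 kHz rate
dataset = OnlineWaveDataset(task_config, bucket_size=8,
                            file_path="data/len_for_bucket",
                            sets=["train-clean-100"],
                            libri_root="/path/to/LibriSpeech")
loader = DataLoader(dataset, batch_size=1, shuffle=True,
                    collate_fn=dataset.collate_fn)
wave_input, wave_orig, wave_len, pad_mask = next(iter(loader))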
# -*- coding:utf-8 -*- <import_stmt>os<import_stmt>sys<import_from_stmt>multiprocessing Pool<import_stmt>cv2<import_stmt>imgaug<as>ia<import_from_stmt>imgaug augmenters<as>iaa<import_stmt>numpy<as>np<import_stmt>PIL<import_from_stmt>PIL Image<line_sep>PIL.Image.MAX_IMAGE_PIXELS=200000000<line_sep>process_num=64<line_sep>ia.seed(1)<def_stmt>preprocess_handler img_name img_dir rot_list out_img_dir='/data/dataset/SKU110K/SKU110K-R/images'<block_start>img_path=os.path.join(img_dir img_name)<try_stmt><block_start>img=Image.open(img_path).convert('RGB')<line_sep>img=np.array(img)<block_end><except_stmt><block_start><try_stmt><block_start>img=cv2.imread(img_path)<block_end><except_stmt><block_start>print(img_path)<block_end><block_end><for_stmt>ang rot_list<block_start>seq=iaa.Sequential([iaa.Affine(rotate=ang fit_output=<true>)])<line_sep>seq_det=seq.to_deterministic()<line_sep>image_aug=seq_det.augment_images([img])[0]<line_sep>out_img_name='rotate_aug_{}_'.format(str(ang))<line_sep>out_img_name=out_img_name+img_name<if_stmt>out_img_dir<is><none><block_start>out_dir=os.path.join(img_dir out_img_name)<block_end><else_stmt><block_start>out_dir=os.path.join(out_img_dir out_img_name)<block_end>cv2.imwrite(out_dir image_aug [int(cv2.IMWRITE_JPEG_QUALITY) 81])<block_end><block_end><def_stmt>main img_dir<block_start>rotate_angle_list=[-45 -30 -15 15 30 45]<line_sep>p=Pool(process_num)<for_stmt>img_name os.listdir(img_dir)<block_start>p.apply_async(preprocess_handler args=(img_name img_dir rotate_angle_list))<block_end>p.close()<line_sep>p.join()<block_end><if_stmt>__name__<eq>'__main__'<block_start>root_img_dir=sys.argv[1]<line_sep>main(root_img_dir)<block_end>
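# Example invocation (illustrative; the file name and directories are assumptions):
# rotate one image by every angle in the list and, with out_img_dir=None, write
# the augmented copies back into the source directory.
preprocess_handler("test_0.jpg", "/data/dataset/SKU110K/images",
                   [-45, -30, -15, 15, 30, 45], out_img_dir=None)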
<import_stmt>sys<import_stmt>os<import_stmt>pytest<import_from_stmt>numpy array array_equal allclose<import_stmt>matplotlib<line_sep>matplotlib.use('Agg')<import_stmt>matplotlib.pyplot<as>plt<import_from_stmt>lxmls.readers galton<line_sep>tolerance=1e-5<line_sep>@pytest.fixture(scope='module')<def_stmt>galton_data <block_start><return>galton.load()<block_end><def_stmt>test_galton_data galton_data<block_start>mean=galton_data.mean(0)<line_sep>expected_mean=array([68.30818966 68.08846983])<assert_stmt>allclose(mean expected_mean tolerance)<line_sep>std=galton_data.std(0)<line_sep>expected_std=array([1.78637014 2.51658435])<assert_stmt>allclose(std expected_std tolerance)<line_sep>n,bins,_=plt.hist(galton_data)<line_sep>expected_n=[array([0. 14. 23. 66. 289. 219. 183. 68. 43. 23.]) array([12. 32. 107. 117. 138. 120. 167. 163. 41. 31.])]<line_sep>expected_bins=array([61.7 62.9 64.1 65.3 66.5 67.7 68.9 70.1 71.3 72.5 73.7])<assert_stmt>allclose(n expected_n tolerance)<assert_stmt>allclose(bins expected_bins tolerance)<block_end><if_stmt>__name__<eq>'__main__'<block_start>pytest.main([__file__])<block_end>
"""Generate videos for adversaries and standard baselines."""<import_stmt>logging<import_stmt>os<import_stmt>os.path<as>osp<import_from_stmt>sacred Experiment<import_from_stmt>sacred.observers FileStorageObserver<import_from_stmt>aprl.common.utils make_timestamp<import_from_stmt>aprl.configs DATA_LOCATION<import_from_stmt>aprl.multi.score extract_data run_external<import_from_stmt>aprl.visualize util<line_sep>make_videos_ex=Experiment("make_videos")<line_sep>make_videos_logger=logging.getLogger("make_videos")<line_sep>@make_videos_ex.config<def_stmt>default_config <block_start>adversary_path=osp.join(DATA_LOCATION "multi_train" "paper" "highest_win_policies_and_rates.json")<line_sep>ray_upload_dir="data"# where Ray will upload multi.score outputs. 'data' works on baremetal score_configs=[("normal" ) ("normal" "mask_observations_of_victim")]<line_sep>multi_score={}<line_sep>root_dir="data/videos"<line_sep>exp_name="default"<line_sep>_=locals()# quieten flake8 unused variable warning <del_stmt>_<block_end>@make_videos_ex.named_config<def_stmt>defense_config <block_start>score_configs=[("defenses" ) ("defenses" "mask_observations_of_victim")]<line_sep>exp_name="defense"<line_sep>_=locals()# quieten flake8 unused variable warning <del_stmt>_<block_end>@make_videos_ex.named_config<def_stmt>slides_config <block_start>"""Generate a subset of videos, with tighter-cropped camera. Intended for slideshows/demos."""<line_sep>score_configs=[("summary" ) ("summary" "mask_observations_of_victim")]<line_sep>multi_score={"score":{"video_params":{"annotation_params":{"camera_config":"close" "short_labels":<true>}}}}<line_sep>exp_name="slides"<line_sep>_=locals()# quieten flake8 unused variable warning <del_stmt>_<block_end>LOW_RES={"score":{"video_params":{"annotation_params":{"resolution":(640 480) "font_size":24}}}}<line_sep>@make_videos_ex.named_config<def_stmt>low_res <block_start>multi_score=LOW_RES<block_end># noqa: F841 @make_videos_ex.named_config<def_stmt>debug_config <block_start>score_configs=[("debug_one_each_type" ) ("debug_one_each_type" "mask_observations_of_victim") ]<line_sep>multi_score=dict(LOW_RES)<line_sep>multi_score["score"]["episodes"]=2<line_sep>exp_name="debug"<line_sep>_=locals()# quieten flake8 unused variable warning <del_stmt>_<block_end>@make_videos_ex.capture<def_stmt>generate_videos score_configs multi_score adversary_path<block_start>"""Uses multi.score to generate videos."""<line_sep><return>run_external(score_configs post_named_configs=["video"] config_updates=multi_score adversary_path=adversary_path )<block_end>@make_videos_ex.capture<def_stmt>extract_videos out_dir video_dirs ray_upload_dir<block_start><def_stmt>path_generator trial_root env_sanitized victim_index victim_type victim_path opponent_type opponent_path cfg <block_start>src_path=osp.join(trial_root "data" "sacred" "score" "1" "videos" "env_0_episode_0_recording.mp4")<line_sep>victim_suffix=""<line_sep>opponent_suffix=""<line_sep>mask_index=cfg["mask_agent_index"]<if_stmt>mask_index<is><not><none><block_start><if_stmt>mask_index<eq>victim_index<block_start>victim_suffix="M"<block_end><else_stmt><block_start>opponent_suffix<eq>"M"<block_end><block_end>victim=util.abbreviate_agent_config(cfg["env_name"] victim_type victim_path victim_suffix victim=<true>)<line_sep>opponent=util.abbreviate_agent_config(cfg["env_name"] opponent_type opponent_path opponent_suffix victim=<false>)<line_sep>new_name=f"{env_sanitized}_victim_{victim}_opponent_{opponent}"<line_sep><return>src_path new_name 
"mp4"<block_end><return>extract_data(path_generator out_dir video_dirs ray_upload_dir)<block_end>@make_videos_ex.main<def_stmt>make_videos root_dir exp_name<block_start>out_dir=osp.join(root_dir exp_name make_timestamp())<line_sep>os.makedirs(out_dir)<line_sep>video_dirs=generate_videos()<line_sep>extract_videos(out_dir=out_dir video_dirs=video_dirs)<block_end><def_stmt>main <block_start>observer=FileStorageObserver(osp.join("data" "sacred" "make_videos"))<line_sep>make_videos_ex.observers.append(observer)<line_sep>make_videos_ex.run_commandline()<line_sep>make_videos_logger.info("Sacred run completed, files stored at {}".format(observer.dir))<block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
### ### This file was automatically generated ### <import_from_stmt>archinfo.arch register_arch Endness Register<import_from_stmt>.common ArchPcode<class_stmt>ArchPcode_SuperH_BE_32_SH_1(ArchPcode)<block_start>name='SuperH:BE:32:SH-1'<line_sep>pcode_arch='SuperH:BE:32:SH-1'<line_sep>description='SuperH SH-1 processor 32-bit big-endian'<line_sep>bits=32<line_sep>ip_offset=0x118<line_sep>sp_offset=0x3c<line_sep>bp_offset=sp_offset<line_sep>instruction_endness=Endness.BE<line_sep>register_list=[Register('r0' 4 0x0) Register('r1' 4 0x4) Register('r2' 4 0x8) Register('r3' 4 0xc) Register('r4' 4 0x10) Register('r5' 4 0x14) Register('r6' 4 0x18) Register('r7' 4 0x1c) Register('r8' 4 0x20) Register('r9' 4 0x24) Register('r10' 4 0x28) Register('r11' 4 0x2c) Register('r12' 4 0x30) Register('r13' 4 0x34) Register('r14' 4 0x38) Register('r15' 4 0x3c) Register('sr' 4 0x100) Register('gbr' 4 0x104) Register('vbr' 4 0x108) Register('mach' 4 0x10c) Register('macl' 4 0x110) Register('pr' 4 0x114) Register('pc' 4 0x118 alias_names=('ip' ))]<block_end>register_arch(['superh:be:32:sh-1'] 32 Endness.BE ArchPcode_SuperH_BE_32_SH_1)<line_sep>
<import_stmt>tensorflow<as>tf<import_stmt>pdb<def_stmt>_phase_shift I r batch_size=10# Helper function with main phase shift operation # pdb.set_trace() <block_start>_,a,b,c=I.get_shape().as_list()<line_sep>X=tf.reshape(I (batch_size a b r r))<line_sep>X=tf.transpose(X (0 1 2 4 3))# bsize, a, b, r, r X=tf.split(X a 1)# a, [bsize, b, r, r] X=tf.concat([tf.squeeze(x)<for>x X] 2)# bsize, b, a*r, r <if_stmt>batch_size<eq>1<block_start>X=tf.expand_dims(X 0)<block_end>X=tf.split(X b 1)# b, [bsize, a*r, r] <if_stmt>batch_size<eq>1<block_start>X=tf.concat([x<for>x X] 2)<block_end><else_stmt><block_start>X=tf.concat([tf.squeeze(x)<for>x X] 2)# <block_end>out=tf.reshape(X (batch_size a<times>r b<times>r 1))<if_stmt>batch_size<eq>1<block_start>out=tf.transpose(out (0 2 1 3))<block_end><return>out<block_end><def_stmt>PS X r n_channel=8 batch_size=10# Main OP that you can arbitrarily use in your tensorflow code <block_start>Xc=tf.split(X n_channel -1)<line_sep>X=tf.concat([_phase_shift(x r batch_size)<for>x Xc] 3)<line_sep><return>X<block_end>
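# Quick shape check (illustrative, not from the source): an input holding
# n_channel * r * r feature maps per pixel becomes an r-times upscaled image
# with n_channel channels. Assumes TF2-style eager execution; under TF1 the
# result would need a Session to evaluate.
import tensorflow as tf

batch, a, b, r, n_channel = 10, 8, 8, 2, 8
X = tf.random.uniform((batch, a, b, n_channel * r * r))
Y = PS(X, r, n_channel=n_channel, batch_size=batch)
print(Y.shape)  # expected: (10, 16, 16, 8)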
<import_from_stmt>envi.archs.msp430.regs *<line_sep>checks=[# SETC ('SETC' {'regs':[] 'flags':[(SR_N 0) (SR_Z 0) (SR_C 0) (SR_V 0)] 'code':"12d3" 'data':""} {'regs':[] 'flags':[(SR_N 0) (SR_Z 0) (SR_C 1) (SR_V 0)] 'code':"12d3" 'data':""}) ]<line_sep>
<class_stmt>dotRebarEndDetailStrip_t(object)# no doc <block_start>RebarHookData=<none><line_sep>RebarStrip=<none><line_sep>RebarThreading=<none><block_end>