<|file_name|>issue-5708.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*
# ICE when returning struct with reference to trait
A function which takes a reference to a trait and returns a
struct with that reference results in an ICE.
This does not occur with concrete types, only with references
to traits.
*/
// original
trait Inner {
fn print(&self);
}
impl Inner for int {
fn print(&self) { print!("Inner: {}\n", *self); }
}
struct Outer<'a> {
inner: &'a Inner+'a
}
impl<'a> Outer<'a> {
fn new(inner: &Inner) -> Outer {
Outer {
inner: inner
}
}
}
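// Editorial note, not part of the original test: in modern Rust the same
// constructor would be written with explicit lifetimes and `dyn`, e.g.
//     fn new(inner: &'a dyn Inner) -> Outer<'a> { Outer { inner } }
// The lifetime-elided form above is the pattern this regression test guards.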
pub fn main() {
let inner = 5i;
let outer = Outer::new(&inner as &Inner);
outer.inner.print();
}
// minimal
trait MyTrait<T> { }
pub struct MyContainer<'a, T> {
foos: Vec<&'a MyTrait<T>+'a> ,
}
impl<'a, T> MyContainer<'a, T> {
    pub fn add(&mut self, foo: &'a MyTrait<T>) {
        self.foos.push(foo);
    }
}<|fim▁end|>
<|file_name|>parsers.py<|end_file_name|><|fim▁begin|>import re
from markdown import Markdown, TextPreprocessor
from smartypants import smartyPants
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name, TextLexer
class CodeBlockPreprocessor(TextPreprocessor):
"""
The Pygments Markdown Preprocessor
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This fragment is a Markdown_ preprocessor that renders source code
to HTML via Pygments. To use it, invoke Markdown like so::
from markdown import Markdown
md = Markdown()
md.textPreprocessors.insert(0, CodeBlockPreprocessor())
html = md.convert(someText)
markdown is then a callable that can be passed to the context of
a template and used in that template, for example.
This uses CSS classes by default, so use
``pygmentize -S <some style> -f html > pygments.css``
to create a stylesheet to be added to the website.
You can then highlight source code in your markdown markup::
@@ lexer
some code
@@ end
.. _Markdown: http://www.freewisdom.org/projects/python-markdown/
:copyright: 2007 by Jochen Kupperschmidt.
:license: BSD, see LICENSE for more details.
"""
pattern = re.compile(r'@@ (.+?)\n(.+?)\n@@ end', re.S)
formatter = HtmlFormatter(noclasses=False)
def run(self, lines):
def repl(m):
try:
lexer = get_lexer_by_name(m.group(1))
except ValueError:
lexer = TextLexer()
code = highlight(m.group(2), lexer, self.formatter)
code = code.replace('\n\n', '\n \n').replace('\n', '<br />')
return '\n\n<div class="code">%s</div>\n\n' % code
return self.pattern.sub(repl, lines)
class SmartyPantsPreprocessor(TextPreprocessor):
"""<|fim▁hole|> """
pattern = re.compile(r'(.+?)(@@.+?@@ end|$)', re.S)
def run(self, lines):
def repl(m):
return smartyPants(m.group(1)) + m.group(2)
return self.pattern.sub(repl, lines)
def parse_markdown(value):
"""
Parses a markdown value and renders it to HTML, using the Pygments code-block preprocessor and SmartyPants
"""
md = Markdown()
md.textPreprocessors.insert(0, SmartyPantsPreprocessor())
md.textPreprocessors.insert(1, CodeBlockPreprocessor())
return md.convert(value)<|fim▁end|>
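A minimal usage sketch for parsers.py above. It assumes the pre-2.0 markdown API with textPreprocessors (as the module itself does) and the @@ fence syntax described in the CodeBlockPreprocessor docstring; none of this is part of the original file.

# Hypothetical driver; only names defined in parsers.py are used, but the
# old markdown/smartypants APIs are assumed, not verified against a release.
source = (
    'Some *markdown* text with "smart" quotes...\n'
    '\n'
    '@@ python\n'
    'print("hello")\n'
    '@@ end\n'
)
html = parse_markdown(source)  # HTML containing a Pygments <div class="code"> block
print(html)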
<|file_name|>test_profile.py<|end_file_name|><|fim▁begin|>"""Test suite for the profile module."""
import sys
import pstats
import unittest
from difflib import unified_diff
from io import StringIO
from test.support import run_unittest
import profile
from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
profilerclass = profile.Profile
methodnames = ['print_stats', 'print_callers', 'print_callees']
expected_max_output = ':0(max)'
def get_expected_output(self):
return _ProfileOutput
@classmethod
def do_profiling(cls):
results = []
prof = cls.profilerclass(timer, 0.001)
start_timer = timer()
prof.runctx("testfunc()", globals(), locals())
results.append(timer() - start_timer)
for methodname in cls.methodnames:
s = StringIO()
stats = pstats.Stats(prof, stream=s)
stats.strip_dirs().sort_stats("stdname")
getattr(stats, methodname)()
output = s.getvalue().splitlines()
mod_name = testfunc.__module__.rsplit('.', 1)[1]
# Only compare against stats originating from the test file.
# Prevents outside code (e.g., the io module) from causing
# unexpected output.
output = [line.rstrip() for line in output if mod_name in line]
results.append('\n'.join(output))
return results
def test_cprofile(self):
results = self.do_profiling()
expected = self.get_expected_output()
self.assertEqual(results[0], 1000)
for i, method in enumerate(self.methodnames):
if results[i+1] != expected[method]:
print("Stats.%s output for %s doesn't fit expectation!" %
(method, self.profilerclass.__name__))
print('\n'.join(unified_diff(
results[i+1].split('\n'),
expected[method].split('\n'))))
def test_calling_conventions(self):
# Issue #5330: profile and cProfile wouldn't report C functions called
# with keyword arguments. We test all calling conventions.
stmts = [
"max([0])",
"max([0], key=int)",
"max([0], **dict(key=int))",
"max(*([0],))",
"max(*([0],), key=int)",
"max(*([0],), **dict(key=int))",
]
for stmt in stmts:
s = StringIO()
prof = self.profilerclass(timer, 0.001)
prof.runctx(stmt, globals(), locals())
stats = pstats.Stats(prof, stream=s)
stats.print_stats()
res = s.getvalue()
self.assertIn(self.expected_max_output, res,
"Profiling {0!r} didn't report max:\n{1}".format(stmt, res))
def regenerate_expected_output(filename, cls):
filename = filename.rstrip('co')
print('Regenerating %s...' % filename)
results = cls.do_profiling()
newfile = []
with open(filename, 'r') as f:
for line in f:
newfile.append(line)
if line.startswith('#--cut'):
break
with open(filename, 'w') as f:
f.writelines(newfile)
f.write("_ProfileOutput = {}\n")
for i, method in enumerate(cls.methodnames):
f.write('_ProfileOutput[%r] = """\\\n%s"""\n' % (
method, results[i+1]))
f.write('\nif __name__ == "__main__":\n main()\n')
def test_main():
run_unittest(ProfileTest)
def main():
if '-r' not in sys.argv:
test_main()
else:
regenerate_expected_output(__file__, ProfileTest)
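# Typical invocations, inferred from the argument handling above:
#   python test_profile.py       -> run the unit tests
#   python test_profile.py -r    -> regenerate the expected output below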
# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
_ProfileOutput = {}
_ProfileOutput['print_stats'] = """\
28 27.972 0.999 27.972 0.999 profilee.py:110(__getattr__)
1 269.996 269.996 999.769 999.769 profilee.py:25(testfunc)
23/3 149.937 6.519 169.917 56.639 profilee.py:35(factorial)
20 19.980 0.999 19.980 0.999 profilee.py:48(mul)
2 39.986 19.993 599.830 299.915 profilee.py:55(helper)
4 115.984 28.996 119.964 29.991 profilee.py:73(helper1)
2 -0.006 -0.003 139.946 69.973 profilee.py:84(helper2_indirect)
8 311.976 38.997 399.912 49.989 profilee.py:88(helper2)
8 63.976 7.997 79.960 9.995 profilee.py:98(subhelper)"""
_ProfileOutput['print_callers'] = """\
:0(append) <- profilee.py:73(helper1)(4) 119.964
:0(exc_info) <- profilee.py:73(helper1)(4) 119.964
:0(hasattr) <- profilee.py:73(helper1)(4) 119.964
profilee.py:88(helper2)(8) 399.912
profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964
profilee.py:98(subhelper)(16) 79.960
profilee.py:25(testfunc) <- <string>:1(<module>)(1) 999.767
profilee.py:35(factorial) <- profilee.py:25(testfunc)(1) 999.769
profilee.py:35(factorial)(20) 169.917
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:48(mul) <- profilee.py:35(factorial)(20) 169.917
profilee.py:55(helper) <- profilee.py:25(testfunc)(2) 999.769
profilee.py:73(helper1) <- profilee.py:55(helper)(4) 599.830
profilee.py:84(helper2_indirect) <- profilee.py:55(helper)(2) 599.830
profilee.py:88(helper2) <- profilee.py:55(helper)(6) 599.830
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:98(subhelper) <- profilee.py:88(helper2)(8) 399.912"""
_ProfileOutput['print_callees'] = """\
:0(hasattr) -> profilee.py:110(__getattr__)(12) 27.972
<string>:1(<module>) -> profilee.py:25(testfunc)(1) 999.769
profilee.py:110(__getattr__) ->
profilee.py:25(testfunc) -> profilee.py:35(factorial)(1) 169.917
profilee.py:55(helper)(2) 599.830
profilee.py:35(factorial) -> profilee.py:35(factorial)(20) 169.917
profilee.py:48(mul)(20) 19.980
profilee.py:48(mul) ->
profilee.py:55(helper) -> profilee.py:73(helper1)(4) 119.964
profilee.py:84(helper2_indirect)(2) 139.946
profilee.py:88(helper2)(6) 399.912
profilee.py:73(helper1) -> :0(append)(4) -0.004
profilee.py:84(helper2_indirect) -> profilee.py:35(factorial)(2) 169.917
profilee.py:88(helper2)(2) 399.912
profilee.py:88(helper2) -> :0(hasattr)(8) 11.964
profilee.py:98(subhelper)(8) 79.960
profilee.py:98(subhelper) -> profilee.py:110(__getattr__)(16) 27.972"""
if __name__ == "__main__":
main()<|fim▁end|>
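As a standalone reference, the profile-plus-pstats flow exercised by the test above boils down to the following stdlib-only sketch; it is independent of the test harness and its timer stubs.

import io
import profile
import pstats

# Profile a trivial statement and print the collected statistics.
prof = profile.Profile()
prof.runctx("sum(range(10))", globals(), locals())
stream = io.StringIO()
stats = pstats.Stats(prof, stream=stream)
stats.strip_dirs().sort_stats("stdname")
stats.print_stats()
print(stream.getvalue())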
<|file_name|>ClientApplicationAccessToken.java<|end_file_name|><|fim▁begin|>package fi.otavanopisto.pyramus.domainmodel.clientapplications;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import javax.persistence.TableGenerator;
import javax.validation.constraints.NotNull;
import org.hibernate.validator.constraints.NotEmpty;
@Entity
public class ClientApplicationAccessToken {
public Long getId() {
return id;
}
public String getAccessToken() {
return accessToken;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public Long getExpires() {
return expires;
}
public void setExpires(Long expires) {
this.expires = expires;
}
public ClientApplication getClientApplication() {
return clientApplication;
}
public void setClientApplication(ClientApplication clientApplication) {
this.clientApplication = clientApplication;
}
public ClientApplicationAuthorizationCode getClientApplicationAuthorizationCode() {
return clientApplicationAuthorizationCode;
}
public void setClientApplicationAuthorizationCode(ClientApplicationAuthorizationCode clientApplicationAuthorizationCode) {
this.clientApplicationAuthorizationCode = clientApplicationAuthorizationCode;
}
public String getRefreshToken() {
return refreshToken;
}
public void setRefreshToken(String refreshToken) {
this.refreshToken = refreshToken;
}
@Id
@GeneratedValue(strategy = GenerationType.TABLE, generator = "ClientApplicationAccessToken")
@TableGenerator(name = "ClientApplicationAccessToken", allocationSize = 1, table = "hibernate_sequences", pkColumnName = "sequence_name", valueColumnName = "sequence_next_hi_value")
private Long id;
@NotNull
@NotEmpty
@Column(nullable = false, unique=true)
private String accessToken;
@NotEmpty
@Column(nullable = false)
private String refreshToken;
@NotNull
@Column(nullable = false)
private Long expires;
@NotNull
@ManyToOne
@JoinColumn(name = "app_id", nullable = false)
private ClientApplication clientApplication;
@NotNull
@OneToOne
@JoinColumn(name = "clientApplicationAuthorizationCode", unique = true, nullable = false)
private ClientApplicationAuthorizationCode clientApplicationAuthorizationCode;
}<|fim▁end|>
<|file_name|>refl_gui.py<|end_file_name|><|fim▁begin|># pylint: disable = too-many-lines, invalid-name, line-too-long, too-many-instance-attributes,
# pylint: disable = too-many-branches,too-many-locals, too-many-nested-blocks
from __future__ import (absolute_import, division, print_function)
try:
from mantidplot import *
except ImportError:
canMantidPlot = False
import csv
import os
import math
import re
from operator import itemgetter
import itertools
from PyQt4 import QtCore, QtGui
from mantid.simpleapi import *
from isis_reflectometry.quick import *
from isis_reflectometry.convert_to_wavelength import ConvertToWavelength
from isis_reflectometry import load_live_runs
from isis_reflectometry.combineMulti import *
import mantidqtpython
from mantid.api import Workspace, WorkspaceGroup, CatalogManager, AlgorithmManager
from mantid import UsageService
from ui.reflectometer.ui_refl_window import Ui_windowRefl
from ui.reflectometer.refl_save import Ui_SaveWindow
from ui.reflectometer.refl_choose_col import ReflChoose
from ui.reflectometer.refl_options import ReflOptions
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
canMantidPlot = True
class ReflGui(QtGui.QMainWindow, Ui_windowRefl):
current_instrument = None
current_table = None
current_polarisation_method = None
labelStatus = None
accMethod = None
def __init__(self):
"""
Initialise the interface
"""
super(QtGui.QMainWindow, self).__init__()
self.setupUi(self)
self.loading = False
self.clip = QtGui.QApplication.clipboard()
self.shown_cols = {}
self.mod_flag = False
self.run_cols = [0, 5, 10]
self.angle_cols = [1, 6, 11]
self.scale_col = 16
self.stitch_col = 17
self.plot_col = 18
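# Table layout implied by the indices above: columns 0-4, 5-9 and 10-14
# each hold (run, angle, transmission, q_min, q_max) for up to three runs
# per row; column 15 is dq/q, then scale (16), stitch (17) and plot (18).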
self.__graphs = dict()
self._last_trans = ""
self.icat_file_map = None
self.__instrumentRuns = None
self.__icat_download = False
self.__group_tof_workspaces = True
# Q Settings
self.__generic_settings = "Mantid/ISISReflGui"
self.__live_data_settings = "Mantid/ISISReflGui/LiveData"
self.__search_settings = "Mantid/ISISReflGui/Search"
self.__column_settings = "Mantid/ISISReflGui/Columns"
self.__icat_download_key = "icat_download"
self.__ads_use_key = "AlgUse"
self.__alg_migration_key = "AlgUseReset"
self.__live_data_frequency_key = "frequency"
self.__live_data_method_key = "method"
self.__group_tof_workspaces_key = "group_tof_workspaces"
self.__stitch_right_key = "stitch_right"
# Setup instrument with defaults assigned.
self.instrument_list = ['INTER', 'SURF', 'CRISP', 'POLREF', 'OFFSPEC']
self.polarisation_instruments = ['CRISP', 'POLREF']
self.polarisation_options = {'None': PolarisationCorrection.NONE,
'1-PNR': PolarisationCorrection.PNR,
'2-PA': PolarisationCorrection.PA}
# Set the live data settings, use default if none have been set before
settings = QtCore.QSettings()
settings.beginGroup(self.__live_data_settings)
self.live_method = settings.value(self.__live_data_method_key, "", type=str)
self.live_freq = settings.value(self.__live_data_frequency_key, 0, type=float)
if not self.live_freq:
logger.information(
"No settings were found for Update frequency of loading live data, Loading default of 60 seconds")
self.live_freq = float(60)
settings.setValue(self.__live_data_frequency_key, self.live_freq)
if not self.live_method:
logger.information(
"No settings were found for Accumulation Method of loading live data, Loading default of \"Add\"")
self.live_method = "Add"
settings.setValue(self.__live_data_method_key, self.live_method)
settings.endGroup()
settings.beginGroup(self.__generic_settings)
self.__alg_migrate = settings.value(self.__alg_migration_key, True, type=bool)
if self.__alg_migrate:
self.__alg_use = True # We will use the algorithms by default rather than the quick scripts
self.__alg_migrate = False # Never do this again. We only want to reset once.
else:
self.__alg_use = settings.value(self.__ads_use_key, True, type=bool)
self.__icat_download = settings.value(self.__icat_download_key, False, type=bool)
self.__group_tof_workspaces = settings.value(self.__group_tof_workspaces_key, True, type=bool)
self.__scale_right = settings.value(self.__stitch_right_key, True, type=bool)
settings.setValue(self.__ads_use_key, self.__alg_use)
settings.setValue(self.__icat_download_key, self.__icat_download)
settings.setValue(self.__group_tof_workspaces_key, self.__group_tof_workspaces)
settings.setValue(self.__alg_migration_key, self.__alg_migrate)
settings.setValue(self.__stitch_right_key, self.__scale_right)
settings.endGroup()
del settings
# register startup
UsageService.registerFeatureUsage("Interface", "ISIS Reflectometry", False)
def __del__(self):
"""
Save the contents of the table if the modified flag was still set
"""
if self.mod_flag:
self._save(True)
def _save_check(self):
"""
Show a custom message box asking if the user wants to save, or discard their changes or cancel back to the interface
"""
msgBox = QtGui.QMessageBox()
msgBox.setText("The table has been modified. Do you want to save your changes?")
accept_btn = QtGui.QPushButton('Save')
cancel_btn = QtGui.QPushButton('Cancel')
discard_btn = QtGui.QPushButton('Discard')
msgBox.addButton(accept_btn, QtGui.QMessageBox.AcceptRole)
msgBox.addButton(cancel_btn, QtGui.QMessageBox.RejectRole)
msgBox.addButton(discard_btn, QtGui.QMessageBox.NoRole)
msgBox.setIcon(QtGui.QMessageBox.Question)
msgBox.setDefaultButton(accept_btn)
msgBox.setEscapeButton(cancel_btn)
msgBox.exec_()
btn = msgBox.clickedButton()
saved = None
if btn.text() == accept_btn.text():
ret = QtGui.QMessageBox.AcceptRole
saved = self._save()
elif btn.text() == cancel_btn.text():
ret = QtGui.QMessageBox.RejectRole
else:
ret = QtGui.QMessageBox.NoRole
return ret, saved
def closeEvent(self, event):
"""
Close the window. but check if the user wants to save
"""
self.buttonProcess.setFocus()
if self.mod_flag:
event.ignore()
ret, saved = self._save_check()
if ret == QtGui.QMessageBox.AcceptRole:
if saved:
self.mod_flag = False
event.accept()
elif ret == QtGui.QMessageBox.RejectRole:
event.ignore()
elif ret == QtGui.QMessageBox.NoRole:
self.mod_flag = False
event.accept()
def _instrument_selected(self, instrument):
"""
Change the default instrument to the selected one
"""
config['default.instrument'] = self.instrument_list[instrument]
logger.notice("Instrument is now: " + str(config['default.instrument']))
self.textRB.clear()
self._populate_runs_list()
self.current_instrument = self.instrument_list[instrument]
self.comboPolarCorrect.setEnabled(
self.current_instrument in self.polarisation_instruments) # Enable as appropriate
self.comboPolarCorrect.setCurrentIndex(self.comboPolarCorrect.findText('None')) # Reset to None
def _table_modified(self, row, column):
"""
sets the modified flag when the table is altered
"""
# Sometimes users enter leading or trailing whitespace into a cell.
# Let's remove it for them automatically.
item = self.tableMain.item(row, column)
item.setData(0, str.strip(str(item.data(0))))
if not self.loading:
self.mod_flag = True
plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1]
self.__reset_plot_button(plotbutton)
def _plot_row(self):
"""
handler for the plot buttons
"""
plotbutton = self.sender()
self._plot(plotbutton)
def _show_slit_calculator(self):
calc = mantidqtpython.MantidQt.MantidWidgets.SlitCalculator(self)
calc.setCurrentInstrumentName(self.current_instrument)
calc.processInstrumentHasBeenChanged()
calc.exec_()
def _polar_corr_selected(self):
"""
Event handler for polarisation correction selection.
"""
if self.current_instrument in self.polarisation_instruments:
chosen_method = self.comboPolarCorrect.currentText()
self.current_polarisation_method = self.polarisation_options[chosen_method]
else:
logger.notice("Polarisation correction is not supported on " + str(self.current_instrument))
def setup_layout(self):
"""
Do further setup layout that couldn't be done in the designer
"""
self.comboInstrument.addItems(self.instrument_list)
current_instrument = config['default.instrument'].upper()
if current_instrument in self.instrument_list:
self.comboInstrument.setCurrentIndex(self.instrument_list.index(current_instrument))
else:
self.comboInstrument.setCurrentIndex(0)
config['default.instrument'] = 'INTER'
self.current_instrument = config['default.instrument'].upper()
# Setup polarisation options with default assigned
self.comboPolarCorrect.clear()
self.comboPolarCorrect.addItems(list(self.polarisation_options.keys()))
self.comboPolarCorrect.setCurrentIndex(self.comboPolarCorrect.findText('None'))
self.current_polarisation_method = self.polarisation_options['None']
self.comboPolarCorrect.setEnabled(self.current_instrument in self.polarisation_instruments)
self.splitterList.setSizes([200, 800])
self.labelStatus = QtGui.QLabel("Ready")
self.statusMain.addWidget(self.labelStatus)
self._initialise_table()
self._populate_runs_list()
self._connect_slots()
return True
def _reset_table(self):
"""
Reset the plot buttons and stitch checkboxes back to their default state
"""
# switches from current to true, to false to make sure stateChanged fires
self.checkTickAll.setCheckState(2)
self.checkTickAll.setCheckState(0)
for row in range(self.tableMain.rowCount()):
plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1]
self.__reset_plot_button(plotbutton)
def __reset_plot_button(self, plotbutton):
"""
Reset the provided plot button to its default state: disabled and with no cache
"""
plotbutton.setDisabled(True)
plotbutton.setProperty('runno', None)
plotbutton.setProperty('overlapLow', None)
plotbutton.setProperty('overlapHigh', None)
plotbutton.setProperty('wksp', None)
def _initialise_table(self):
"""
Initialise the table. Clearing all data and adding the checkboxes and plot buttons
"""
# first check if the table has been changed before clearing it
if self.mod_flag:
ret, _saved = self._save_check()
if ret == QtGui.QMessageBox.RejectRole:
return
self.current_table = None
settings = QtCore.QSettings()
settings.beginGroup(self.__column_settings)
for column in range(self.tableMain.columnCount()):
for row in range(self.tableMain.rowCount()):
if column in self.run_cols:
item = QtGui.QTableWidgetItem()
item.setText('')
item.setToolTip('Runs can be colon delimited to coadd them')
self.tableMain.setItem(row, column, item)
elif column in self.angle_cols:
item = QtGui.QTableWidgetItem()
item.setText('')
item.setToolTip('Angles are in degrees')
self.tableMain.setItem(row, column, item)
elif column == self.stitch_col:
check = QtGui.QCheckBox()
check.setCheckState(False)
check.setToolTip('If checked, the runs in this row will be stitched together')
item = QtGui.QWidget()
layout = QtGui.QHBoxLayout(item)
layout.addWidget(check)
layout.setAlignment(QtCore.Qt.AlignCenter)
layout.setSpacing(0)
layout.setContentsMargins(0, 0, 0, 0)
item.setLayout(layout)
item.setContentsMargins(0, 0, 0, 0)
self.tableMain.setCellWidget(row, self.stitch_col, item)
elif column == self.plot_col:
button = QtGui.QPushButton('Plot')
button.setProperty("row", row)
self.__reset_plot_button(button)
button.setToolTip('Plot the workspaces produced by processing this row.')
button.clicked.connect(self._plot_row)
item = QtGui.QWidget()
layout = QtGui.QHBoxLayout(item)
layout.addWidget(button)
layout.setAlignment(QtCore.Qt.AlignCenter)
layout.setSpacing(0)
layout.setContentsMargins(0, 0, 0, 0)
item.setLayout(layout)
item.setContentsMargins(0, 0, 0, 0)
self.tableMain.setCellWidget(row, self.plot_col, item)
else:
item = QtGui.QTableWidgetItem()
item.setText('')
self.tableMain.setItem(row, column, item)
vis_state = settings.value(str(column), True, type=bool)
self.shown_cols[column] = vis_state
if vis_state:
self.tableMain.showColumn(column)
else:
self.tableMain.hideColumn(column)
settings.endGroup()
del settings
self.tableMain.resizeColumnsToContents()
self.mod_flag = False
def _connect_slots(self):
"""
Connect the signals to the corresponding methods
"""
self.checkTickAll.stateChanged.connect(self._set_all_stitch)
self.comboInstrument.activated[int].connect(self._instrument_selected)
self.comboPolarCorrect.activated.connect(self._polar_corr_selected)
self.textRB.returnPressed.connect(self._populate_runs_list)
self.buttonAuto.clicked.connect(self._autofill)
self.buttonSearch.clicked.connect(self._populate_runs_list)
self.buttonClear.clicked.connect(self._initialise_table)
self.buttonProcess.clicked.connect(self._process)
self.buttonTransfer.clicked.connect(self._transfer)
self.buttonColumns.clicked.connect(self._choose_columns)
self.actionOpen_Table.triggered.connect(self._load_table)
self.actionReload_from_Disk.triggered.connect(self._reload_table)
self.actionSave.triggered.connect(self._save)
self.actionSave_As.triggered.connect(self._save_as)
self.actionSave_Workspaces.triggered.connect(self._save_workspaces)
self.actionClose_Refl_Gui.triggered.connect(self.close)
self.actionMantid_Help.triggered.connect(self._show_help)
self.actionAutofill.triggered.connect(self._autofill)
self.actionSearch_RB.triggered.connect(self._populate_runs_list)
self.actionClear_Table.triggered.connect(self._initialise_table)
self.actionProcess.triggered.connect(self._process)
self.actionTransfer.triggered.connect(self._transfer)
self.tableMain.cellChanged.connect(self._table_modified)
self.actionClear.triggered.connect(self._clear_cells)
self.actionPaste.triggered.connect(self._paste_cells)
self.actionCut.triggered.connect(self._cut_cells)
self.actionCopy.triggered.connect(self._copy_cells)
self.actionChoose_Columns.triggered.connect(self._choose_columns)
self.actionRefl_Gui_Options.triggered.connect(self._options_dialog)
self.actionSlit_Calculator.triggered.connect(self._show_slit_calculator)
def __valid_rb(self):
# Ensure that you cannot put zero in for an rb search
rbSearchValidator = QtGui.QIntValidator(self)
current_text = self.textRB.text()
rbSearchValidator.setBottom(1)
state = rbSearchValidator.validate(current_text, 0)[0]
if state == QtGui.QValidator.Acceptable:
return True
else:
self.textRB.clear()
if current_text:
logger.warning("RB search restricted to numbers > 0")
return False
def _populate_runs_list(self):
"""
Populate the list at the right with names of runs and workspaces from the archives
"""
# Clear existing
self.listMain.clear()
if self.__valid_rb():
# Use ICAT for a journal search based on the RB number
active_session_id = None
if CatalogManager.numberActiveSessions() == 0:
# Execute the CatalogLoginDialog
login_alg = CatalogLoginDialog()
session_object = login_alg.getProperty("KeepAlive").value
active_session_id = session_object.getPropertyValue("Session")
# Fetch out an existing session id
active_session_id = CatalogManager.getActiveSessions()[-1].getSessionId()
# This might be another catalog session, but at present there is no way to tell.
search_alg = AlgorithmManager.create('CatalogGetDataFiles')
search_alg.initialize()
search_alg.setChild(True) # Keeps the results table out of the ADS
search_alg.setProperty('InvestigationId', str(self.textRB.text()))
search_alg.setProperty('Session', active_session_id)
search_alg.setPropertyValue('OutputWorkspace', '_dummy')
search_alg.execute()
search_results = search_alg.getProperty('OutputWorkspace').value
self.icat_file_map = {}
self.statusMain.clearMessage()
for row in search_results:
file_name = row['Name']
file_id = row['Id']
description = row['Description']
run_number = re.search(r'[1-9]\d+', file_name).group()
if bool(re.search('(raw)$', file_name, re.IGNORECASE)): # Filter to only display and map raw files.
title = (run_number + ': ' + description).strip()
self.icat_file_map[title] = (file_id, run_number, file_name)
self.listMain.addItem(title)
self.listMain.sortItems()
del search_results
def _autofill(self):
"""
copy the contents of the selected cells to the row below as long as the row below contains a run number in the first cell
"""
# make sure all selected cells are in the same row
row_sum = 0
howMany = len(self.tableMain.selectedItems())
for cell in self.tableMain.selectedItems():
row_sum = row_sum + self.tableMain.row(cell)
if howMany:
selectedrow = self.tableMain.row(self.tableMain.selectedItems()[0])
if row_sum / howMany == selectedrow:
startrow = selectedrow + 1
filled = 0
for cell in self.tableMain.selectedItems():
row = startrow
txt = cell.text()
while self.tableMain.item(row, 0).text() != '':
item = QtGui.QTableWidgetItem()
item.setText(txt)
self.tableMain.setItem(row, self.tableMain.column(cell), item)
row = row + 1
filled = filled + 1
if not filled:
QtGui.QMessageBox.critical(self.tableMain,
'Cannot perform Autofill',
"No target cells to autofill. Rows to be filled should contain a run number in their "
"first cell, and start from directly below the selected line.")
else:
QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill',
"Selected cells must all be in the same row.")
else:
QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill', "There are no source cells selected.")
def _clear_cells(self):
"""
Clear the selected area of data
"""
cells = self.tableMain.selectedItems()
for cell in cells:
column = cell.column()
if column < self.stitch_col:
cell.setText('')
def _cut_cells(self):
"""
copy the selected cells then clear the area
"""
self._copy_cells()
self._clear_cells()
def _copy_cells(self):
"""
Copy the selected range of cells to the clipboard
"""
cells = self.tableMain.selectedItems()
if not cells:
print('nothing to copy')
return
# first discover the size of the selection and initialise a list
mincol = cells[0].column()
if mincol > self.scale_col:
logger.error("Cannot copy, all cells out of range")
return
maxrow = -1
maxcol = -1
minrow = cells[0].row()
for cell in reversed(range(len(cells))):
col = cells[cell].column()
if col < self.stitch_col:
maxcol = col
maxrow = cells[cell].row()
break
colsize = maxcol - mincol + 1
rowsize = maxrow - minrow + 1
selection = [['' for x in range(colsize)] for y in range(rowsize)]
# now fill that list
for cell in cells:
row = cell.row()
col = cell.column()
if col < self.stitch_col:
selection[row - minrow][col - mincol] = str(cell.text())
tocopy = ''
for y in range(rowsize):
for x in range(colsize):
if x > 0:
tocopy += '\t'
tocopy += selection[y][x]
if y < (rowsize - 1):
tocopy += '\n'
self.clip.setText(str(tocopy))
def _paste_cells(self):
"""
Paste the contents of the clipboard to the table at the selected position
"""
pastedtext = self.clip.text()
if not pastedtext:
logger.warning("Nothing to Paste")
return
selected = self.tableMain.selectedItems()
if not selected:
logger.warning("Cannot paste, no editable cells selected")
return
pasted = pastedtext.splitlines()
pastedcells = []
for row in pasted:
pastedcells.append(row.split('\t'))
pastedcols = len(pastedcells[0])
pastedrows = len(pastedcells)
if len(selected) > 1:
# discover the size of the selection
mincol = selected[0].column()
if mincol > self.scale_col:
logger.error("Cannot copy, all cells out of range")
return
minrow = selected[0].row()
# now fill that list
for cell in selected:
row = cell.row()
col = cell.column()
if col < self.stitch_col and (col - mincol) < pastedcols and (row - minrow) < pastedrows and len(
pastedcells[row - minrow]):
cell.setText(pastedcells[row - minrow][col - mincol])
elif selected:
# when only a single cell is selected, paste all the copied item up until the table limits
cell = selected[0]
currow = cell.row()
homecol = cell.column()
tablerows = self.tableMain.rowCount()
for row in pastedcells:
if len(row):
curcol = homecol
if currow < tablerows:
for col in row:
if curcol < self.stitch_col:
curcell = self.tableMain.item(currow, curcol)
curcell.setText(col)
curcol += 1
else:
# the row has hit the end of the editable cells
break
currow += 1
else:
# it's dropped off the bottom of the table
break
else:
logger.warning("Cannot paste, no editable cells selected")
def _transfer(self):
"""
Transfer run numbers to the table
"""
tup = ()
for idx in self.listMain.selectedItems():
split_title = re.split(":th=|th=|:|dq/q=", idx.text())
if len(split_title) < 3:
split_title = re.split(":", idx.text())
if len(split_title) < 2:
logger.warning('cannot transfer ' + idx.text() + ' title is not in the right form ')
continue
else:
theta = 0
split_title.append(theta) # Append a dummy theta value.
if len(split_title) < 4:
dqq = 0
split_title.append(dqq) # Append a dummy dq/q value.
tup = tup + (split_title,) # Tuple of lists containing (run number, title, theta, dq/q)
tupsort = sorted(tup, key=itemgetter(1, 2)) # now sorted by title then theta
row = 0
for _key, group in itertools.groupby(tupsort, lambda x: x[1]): # now group by title
col = 0
dqq = 0 # only one value of dqq per row
run_angle_pairs_of_title = list() # for storing run_angle pairs all with the same title
for entry in group: # loop over all with equal title
run_no = entry[0]
dqq = entry[-1]
angle = entry[-2]
run_angle_pairs_of_title.append((run_no, angle))
for angle_key, group in itertools.groupby(run_angle_pairs_of_title, lambda x: x[1]):
runnumbers = "+".join(["%s" % pair[0] for pair in group])
# set the runnumber
item = QtGui.QTableWidgetItem()
item.setText(str(runnumbers))
self.tableMain.setItem(row, col, item)
# Set the angle
item = QtGui.QTableWidgetItem()
item.setText(str(angle_key))
self.tableMain.setItem(row, col + 1, item)
# Set the transmission
item = QtGui.QTableWidgetItem()
item.setText(self.textRuns.text())
self.tableMain.setItem(row, col + 2, item)
col = col + 5
if col >= 11:
col = 0
# set dq/q
item = QtGui.QTableWidgetItem()
item.setText(str(dqq))
self.tableMain.setItem(row, 15, item)
row = row + 1
if self.__icat_download:
# If ICAT is being used for download, then files must be downloaded at the same time as they are transferred
contents = str(idx.text()).strip()
file_id, _runnumber, file_name = self.icat_file_map[contents]
active_session_id = CatalogManager.getActiveSessions()[-1].getSessionId()
# This might be another catalog session, but at present there is no way to tell.
save_location = config['defaultsave.directory']
CatalogDownloadDataFiles(file_id, FileNames=file_name, DownloadPath=save_location,
Session=active_session_id)
current_search_dirs = config.getDataSearchDirs()
if save_location not in current_search_dirs:
config.appendDataSearchDir(save_location)
def _set_all_stitch(self, state):
"""
Set the checkboxes in the Stitch? column to the same state
"""
for row in range(self.tableMain.rowCount()):
self.tableMain.cellWidget(row, self.stitch_col).children()[1].setCheckState(state)
def __checked_row_stitched(self, row):
return self.tableMain.cellWidget(row, self.stitch_col).children()[1].checkState() > 0
def _process(self):
"""
Process has been pressed, check what has been selected then pass the selection (or whole table) to quick
"""
# --------- If "Process" button pressed, convert raw files to IvsLam and IvsQ and combine if checkbox ticked -------------
_overallQMin = float("inf")
_overallQMax = float("-inf")
try:
willProcess = True
rows = self.tableMain.selectionModel().selectedRows()
rowIndexes = []
for idx in rows:
rowIndexes.append(idx.row())
if not len(rowIndexes):
reply = QtGui.QMessageBox.question(self.tableMain, 'Process all rows?',
"This will process all rows in the table. Continue?",
QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.No:
logger.notice("Cancelled!")
willProcess = False
else:
rowIndexes = range(self.tableMain.rowCount())
if willProcess:
for row in rowIndexes: # range(self.tableMain.rowCount()):
runno = []
wksp = []
overlapLow = []
overlapHigh = []
if self.tableMain.item(row, 0).text() != '':
self.statusMain.showMessage("Processing row: " + str(row + 1))
logger.debug("Processing row: " + str(row + 1))
for i in range(3):
run_entry = str(self.tableMain.item(row, i * 5).text())
if run_entry != '':
runno.append(run_entry)
ovLow = str(self.tableMain.item(row, (i * 5) + 3).text())
if ovLow != '':
overlapLow.append(float(ovLow))
ovHigh = str(self.tableMain.item(row, (i * 5) + 4).text())
if ovHigh != '':
overlapHigh.append(float(ovHigh))
# Determine resolution
if self.tableMain.item(row, 15).text() == '':
loadedRun = None
if load_live_runs.is_live_run(runno[0]):
loadedRun = load_live_runs.get_live_data(config['default.instrument'],
frequency=self.live_freq,
accumulation=self.live_method)
else:
Load(Filename=runno[0], OutputWorkspace="_run")
loadedRun = mtd["_run"]
theta_in_str = str(self.tableMain.item(row, 1).text())
try:
theta_in = None
if len(theta_in_str) > 0:
theta_in = float(theta_in_str)
# Make sure we only ever run calculate resolution on a non-group workspace.
# If we're given a group workspace, we can just run it on the first member of the group instead
thetaRun = loadedRun
if isinstance(thetaRun, WorkspaceGroup):
thetaRun = thetaRun[0]
if not theta_in:
theta_in = getLogValue(thetaRun, "Theta")
dqq = NRCalculateSlitResolution(Workspace=thetaRun, TwoTheta=2*theta_in)
# Put the calculated resolution into the table
resItem = QtGui.QTableWidgetItem()
resItem.setText(str(dqq))
self.tableMain.setItem(row, 15, resItem)
# Update the value for theta_in in the table
ttItem = QtGui.QTableWidgetItem()
ttItem.setText(str(theta_in))
self.tableMain.setItem(row, 1, ttItem)
logger.notice("Calculated resolution: " + str(dqq))
except:
self.statusMain.clearMessage()
logger.error(
"Failed to calculate dq/q because we could not find theta in the workspace's sample log. "
"Try entering theta or dq/q manually.")
return
else:
dqq = float(self.tableMain.item(row, 15).text())
# Check secondary and tertiary theta_in columns, if they're
# blank and their corresponding run columns are set, fill them.
for run_col in [5, 10]:
tht_col = run_col + 1
run_val = str(self.tableMain.item(row, run_col).text())
tht_val = str(self.tableMain.item(row, tht_col).text())
if run_val and not tht_val:
Load(Filename=run_val, OutputWorkspace="_run")
loadedRun = mtd["_run"]
tht_val = getLogValue(loadedRun, "Theta")
if tht_val:
self.tableMain.item(row, tht_col).setText(str(tht_val))
# Populate runlist
first_wq = None
for i in range(0, len(runno)):
theta, qmin, qmax, _wlam, wqBinnedAndScaled, _wqUnBinnedAndUnScaled = \
self._do_run(runno[i], row, i)
if not first_wq:
first_wq = wqBinnedAndScaled # Cache the first Q workspace
theta = round(theta, 3)
qmin = round(qmin, 3)
qmax = round(qmax, 3)
wksp.append(wqBinnedAndScaled.name())
if self.tableMain.item(row, i * 5 + 1).text() == '':
item = QtGui.QTableWidgetItem()
item.setText(str(theta))
self.tableMain.setItem(row, i * 5 + 1, item)
if self.tableMain.item(row, i * 5 + 3).text() == '':
item = QtGui.QTableWidgetItem()
item.setText(str(qmin))
self.tableMain.setItem(row, i * 5 + 3, item)
overlapLow.append(qmin)
if self.tableMain.item(row, i * 5 + 4).text() == '':
item = QtGui.QTableWidgetItem()
item.setText(str(qmax))
self.tableMain.setItem(row, i * 5 + 4, item)
overlapHigh.append(qmax)
if wksp[i].find(',') > 0 or wksp[i].find(':') > 0:
wksp[i] = first_wq.name()
if self.__checked_row_stitched(row):
if len(runno) == 1:
logger.notice("Nothing to combine for processing row : " + str(row))
else:
w1 = getWorkspace(wksp[0])
w2 = getWorkspace(wksp[-1])
if len(runno) == 2:
outputwksp = runno[0] + '_' + runno[1][3:]
else:
outputwksp = runno[0] + '_' + runno[-1][3:]
# get Qmax
if self.tableMain.item(row, i * 5 + 4).text() == '':
overlapHigh = 0.3 * max(w1.readX(0))
Qmin = min(w1.readX(0))
Qmax = max(w2.readX(0))
if len(self.tableMain.item(row, i * 5 + 3).text()) > 0:
Qmin = float(self.tableMain.item(row, i * 5 + 3).text())
if len(self.tableMain.item(row, i * 5 + 4).text()) > 0:
Qmax = float(self.tableMain.item(row, i * 5 + 4).text())
if Qmax > _overallQMax:
_overallQMax = Qmax
if Qmin < _overallQMin:
_overallQMin = Qmin
combineDataMulti(wksp, outputwksp, overlapLow, overlapHigh,
_overallQMin, _overallQMax, -dqq, 1, keep=True,
scale_right=self.__scale_right)
# Enable the plot button
plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1]
plotbutton.setProperty('runno', runno)
plotbutton.setProperty('overlapLow', overlapLow)
plotbutton.setProperty('overlapHigh', overlapHigh)
plotbutton.setProperty('wksp', wksp)
plotbutton.setEnabled(True)
self.statusMain.clearMessage()
self.accMethod = None
self.statusMain.clearMessage()
except:
self.statusMain.clearMessage()
raise
def _plot(self, plotbutton):
"""
Plot the row belonging to the selected button
"""
if not isinstance(plotbutton, QtGui.QPushButton):
logger.error("Problem accessing cached data: Wrong data type passed, expected QtGui.QPushbutton")
return
import unicodedata
# make sure the required data can be retrieved properly
try:
runno_u = plotbutton.property('runno')
runno = []
for uni in runno_u:
runno.append(unicodedata.normalize('NFKD', uni).encode('ascii', 'ignore'))
wksp_u = plotbutton.property('wksp')
wksp = []
for uni in wksp_u:
wksp.append(unicodedata.normalize('NFKD', uni).encode('ascii', 'ignore'))
overlapLow = plotbutton.property('overlapLow')
overlapHigh = plotbutton.property('overlapHigh')
row = plotbutton.property('row')
wkspBinned = []
w1 = getWorkspace(wksp[0])
w2 = getWorkspace(wksp[len(wksp) - 1])
dqq = float(self.tableMain.item(row, 15).text())
except:
logger.error("Unable to plot row, required data couldn't be retrieved")
self.__reset_plot_button(plotbutton)
return
for i in range(len(runno)):
if len(overlapLow):
Qmin = overlapLow[0]
else:
Qmin = min(w1.readX(0))
if len(overlapHigh):
Qmax = overlapHigh[len(overlapHigh) - 1]
else:
Qmax = max(w2.readX(0))
ws_name_binned = wksp[i]
wkspBinned.append(ws_name_binned)
wsb = getWorkspace(ws_name_binned)
_Imin = min(wsb.readY(0))
_Imax = max(wsb.readY(0))
if canMantidPlot:
# Get the existing graph if it exists
base_graph = self.__graphs.get(wksp[0], None)
# Clear the window if we're the first of a new set of curves
clearWindow = (i == 0)
# Plot the new curve
base_graph = plotSpectrum(ws_name_binned, 0, True, window=base_graph, clearWindow=clearWindow)
# Save the graph so we can re-use it
self.__graphs[wksp[i]] = base_graph
titl = groupGet(ws_name_binned, 'samp', 'run_title')
if isinstance(titl, str):
base_graph.activeLayer().setTitle(titl)
base_graph.activeLayer().setAxisScale(Layer.Left, _Imin * 0.1, _Imax * 10, Layer.Log10)
base_graph.activeLayer().setAxisScale(Layer.Bottom, Qmin * 0.9, Qmax * 1.1, Layer.Log10)
base_graph.activeLayer().setAutoScale()
# Create and plot stitched outputs
if self.__checked_row_stitched(row):
if len(runno) == 2:
outputwksp = runno[0] + '_' + runno[1][3:]
else:
outputwksp = runno[0] + '_' + runno[2][3:]
if not getWorkspace(outputwksp, report_error=False):
# Stitching has not been done as part of processing, so we need to do it here.
combineDataMulti(wkspBinned, outputwksp, overlapLow, overlapHigh, Qmin, Qmax, -dqq, 1,
keep=True, scale_right=self.__scale_right)
Qmin = min(getWorkspace(outputwksp).readX(0))
Qmax = max(getWorkspace(outputwksp).readX(0))
if canMantidPlot:
stitched_graph = self.__graphs.get(outputwksp, None)
stitched_graph = plotSpectrum(outputwksp, 0, True, window=stitched_graph, clearWindow=True)
titl = groupGet(outputwksp, 'samp', 'run_title')
stitched_graph.activeLayer().setTitle(titl)
stitched_graph.activeLayer().setAxisScale(Layer.Left, 1e-8, 100.0, Layer.Log10)
stitched_graph.activeLayer().setAxisScale(Layer.Bottom, Qmin * 0.9, Qmax * 1.1, Layer.Log10)
self.__graphs[outputwksp] = stitched_graph
def __name_trans(self, transrun):
"""
From a comma or colon separated string of run numbers
construct an output workspace name for the transmission workspace that fits the form
TRANS_{trans_1}_{trans_2}
"""
if bool(re.search("^(TRANS)", transrun)):
# The user has deliberately tried to supply the transmission run directly
return transrun
else:
split_trans = re.split(',|:', transrun)
if len(split_trans) == 0:
return None
name = 'TRANS'
for t in split_trans:
name += '_' + str(t)
return name
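# For example (derived from the splitting above): "1234,1235" and "1234:1235"
# both yield "TRANS_1234_1235", while an input that already starts with
# "TRANS" is returned unchanged.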
def _do_run(self, runno, row, which):
"""
Run quick on the given run and row
"""
transrun = str(self.tableMain.item(row, (which * 5) + 2).text())
# Formulate a WS Name for the processed transmission run.
transrun_named = self.__name_trans(transrun)
# Look for existing transmission workspaces that match the name
transmission_ws = None
if mtd.doesExist(transrun_named):
if isinstance(mtd[transrun_named], WorkspaceGroup):
unit = mtd[transrun_named][0].getAxis(0).getUnit().unitID()
else:
unit = mtd[transrun_named].getAxis(0).getUnit().unitID()
if unit == "Wavelength":
logger.notice('Reusing transmission workspace ' + transrun_named)
transmission_ws = mtd[transrun_named]
angle_str = str(self.tableMain.item(row, which * 5 + 1).text())
if len(angle_str) > 0:
angle = float(angle_str)
else:
angle = None
loadedRun = runno
if load_live_runs.is_live_run(runno):
load_live_runs.get_live_data(config['default.instrument'], frequency=self.live_freq,
accumulation=self.live_method)
wlam, wq, th, wqBinned = None, None, None, None
# Only make a transmission workspace if we need one.
if transrun and not transmission_ws:
converter = ConvertToWavelength(transrun)
size = converter.get_ws_list_size()
out_ws_name = transrun_named
if size == 1:
trans1 = converter.get_workspace_from_list(0)
transmission_ws = CreateTransmissionWorkspaceAuto(FirstTransmissionRun=trans1,
OutputWorkspace=out_ws_name,
Params=0.02, StartOverlap=10.0, EndOverlap=12.0,
Version=1)
elif size == 2:
trans1 = converter.get_workspace_from_list(0)
trans2 = converter.get_workspace_from_list(1)
transmission_ws = CreateTransmissionWorkspaceAuto(FirstTransmissionRun=trans1,
OutputWorkspace=out_ws_name,
SecondTransmissionRun=trans2, Params=0.02,
StartOverlap=10.0, EndOverlap=12.0, Version=1)
else:
raise RuntimeError("Up to 2 transmission runs can be specified. No more than that.")
# Load the runs required ConvertToWavelength will deal with the transmission runs, while .to_workspace will deal with the run itself
ws = ConvertToWavelength.to_workspace(loadedRun, ws_prefix="")
if self.__alg_use:
if self.tableMain.item(row, self.scale_col).text():
factor = float(self.tableMain.item(row, self.scale_col).text())
else:
factor = 1.0
if self.tableMain.item(row, 15).text():
Qstep = float(self.tableMain.item(row, 15).text())
else:
Qstep = None
if len(self.tableMain.item(row, which * 5 + 3).text()) > 0:
Qmin = float(self.tableMain.item(row, which * 5 + 3).text())
else:
Qmin = None
if len(self.tableMain.item(row, which * 5 + 4).text()) > 0:
Qmax = float(self.tableMain.item(row, which * 5 + 4).text())
else:
Qmax = None
# If we're dealing with a workspace group, we'll manually map execution over each group member
# We do this so we can get ThetaOut correctly (see ticket #10597 for why we can't at the moment)
if isinstance(ws, WorkspaceGroup):
wqGroupBinned = []
wqGroup = []
wlamGroup = []
thetaGroup = []
group_trans_ws = transmission_ws
for i in range(0, ws.size()):
# If the transmission workspace is a group, we'll use it pair-wise with the tof workspace group
if isinstance(transmission_ws, WorkspaceGroup):
group_trans_ws = transmission_ws[i]
alg = AlgorithmManager.create("ReflectometryReductionOneAuto")
alg.initialize()
alg.setProperty("InputWorkspace", ws[i])
if group_trans_ws:
alg.setProperty("FirstTransmissionRun", group_trans_ws)
if angle is not None:
alg.setProperty("ThetaIn", angle)
alg.setProperty("OutputWorkspaceBinned", runno + '_IvsQ_binned_' + str(i + 1))
alg.setProperty("OutputWorkspace", runno + '_IvsQ_' + str(i + 1))
alg.setProperty("OutputWorkspaceWavelength", runno + '_IvsLam_' + str(i + 1))
alg.setProperty("ScaleFactor", factor)
if Qstep is not None:
alg.setProperty("MomentumTransferStep", Qstep)
if Qmin is not None:
alg.setProperty("MomentumTransferMin", Qmin)
if Qmax is not None:
alg.setProperty("MomentumTransferMax", Qmax)
alg.execute()
wqBinned = mtd[runno + '_IvsQ_binned_' + str(i + 1)]
wq = mtd[runno + '_IvsQ_' + str(i + 1)]
wlam = mtd[runno + '_IvsLam_' + str(i + 1)]
th = alg.getProperty("ThetaIn").value
wqGroupBinned.append(wqBinned)
wqGroup.append(wq)
wlamGroup.append(wlam)
thetaGroup.append(th)
wqBinned = GroupWorkspaces(InputWorkspaces=wqGroupBinned, OutputWorkspace=runno + '_IvsQ_binned')
wq = GroupWorkspaces(InputWorkspaces=wqGroup, OutputWorkspace=runno + '_IvsQ')
wlam = GroupWorkspaces(InputWorkspaces=wlamGroup, OutputWorkspace=runno + '_IvsLam')
th = thetaGroup[0]
else:
alg = AlgorithmManager.create("ReflectometryReductionOneAuto")
alg.initialize()
alg.setProperty("InputWorkspace", ws)
if transmission_ws:
alg.setProperty("FirstTransmissionRun", transmission_ws)
if angle is not None:
alg.setProperty("ThetaIn", angle)
alg.setProperty("OutputWorkspaceBinned", runno + '_IvsQ_binned')
alg.setProperty("OutputWorkspace", runno + '_IvsQ')
alg.setProperty("OutputWorkspaceWavelength", runno + '_IvsLam')
alg.setProperty("ScaleFactor", factor)
if Qstep is not None:
alg.setProperty("MomentumTransferStep", Qstep)
if Qmin is not None:
alg.setProperty("MomentumTransferMin", Qmin)
if Qmax is not None:
alg.setProperty("MomentumTransferMax", Qmax)
alg.execute()
wqBinned = mtd[runno + '_IvsQ_binned']
wq = mtd[runno + '_IvsQ']
wlam = mtd[runno + '_IvsLam']
th = alg.getProperty("ThetaIn").value
cleanup()
else:
wlam, wq, th = quick(loadedRun, trans=transmission_ws, theta=angle, tof_prefix="")
if self.__group_tof_workspaces and not isinstance(ws, WorkspaceGroup):
if "TOF" in mtd:
tof_group = mtd["TOF"]
if not tof_group.contains(loadedRun):
tof_group.add(loadedRun)
else:
tof_group = GroupWorkspaces(InputWorkspaces=loadedRun, OutputWorkspace="TOF")
if ':' in runno:
runno = runno.split(':')[0]
if ',' in runno:
runno = runno.split(',')[0]
if isinstance(wq, WorkspaceGroup):
inst = wq[0].getInstrument()
else:
inst = wq.getInstrument()
lmin = inst.getNumberParameter('LambdaMin')[0]
lmax = inst.getNumberParameter('LambdaMax')[0]
qmin = 4 * math.pi / lmax * math.sin(th * math.pi / 180)
qmax = 4 * math.pi / lmin * math.sin(th * math.pi / 180)
return th, qmin, qmax, wlam, wqBinned, wq
def _save_table_contents(self, filename):
"""
Save the contents of the table
"""
try:
writer = csv.writer(open(filename, "wb"))
for row in range(self.tableMain.rowCount()):
rowtext = []
for column in range(self.tableMain.columnCount() - 2):
rowtext.append(self.tableMain.item(row, column).text())
if len(rowtext) > 0:
writer.writerow(rowtext)
self.current_table = filename
logger.notice("Saved file to " + filename)
self.mod_flag = False
except:
return False
self.mod_flag = False
return True
def _save(self, failsave=False):
"""
Save the table, showing no interface if not necessary. This also provides the failing save functionality.
"""
filename = ''
if failsave:
# this is an emergency autosave as the program is failing
logger.error(
"The ISIS Reflectonomy GUI has encountered an error, it will now attempt to save a copy of your work.")
msgBox = QtGui.QMessageBox()
msgBox.setText(
"The ISIS Reflectonomy GUI has encountered an error, it will now attempt to save a copy of your work.\n"
"Please check the log for details.")
msgBox.setStandardButtons(QtGui.QMessageBox.Ok)
msgBox.setIcon(QtGui.QMessageBox.Critical)
msgBox.setDefaultButton(QtGui.QMessageBox.Ok)
msgBox.setEscapeButton(QtGui.QMessageBox.Ok)
msgBox.exec_()
import datetime
failtime = datetime.datetime.today().strftime('%Y-%m-%d_%H-%M-%S')
if self.current_table:
filename = self.current_table.rsplit('.', 1)[0] + "_recovered_" + failtime + ".tbl"
else:
mantidDefault = config['defaultsave.directory']
if os.path.exists(mantidDefault):
filename = os.path.join(mantidDefault, "mantid_reflectometry_recovered_" + failtime + ".tbl")
else:
import tempfile
tempDir = tempfile.gettempdir()
filename = os.path.join(tempDir, "mantid_reflectometry_recovered_" + failtime + ".tbl")
else:
# this is a save-on-quit or file->save
if self.current_table:
filename = self.current_table
else:
saveDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Save Table")
saveDialog.setFileMode(QtGui.QFileDialog.AnyFile)
saveDialog.setNameFilter("Table Files (*.tbl);;All files (*)")
saveDialog.setDefaultSuffix("tbl")
saveDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
if saveDialog.exec_():
filename = saveDialog.selectedFiles()[0]
else:
return False
return self._save_table_contents(filename)
def _save_as(self):
"""
show the save as dialog and save to a .tbl file with that name
"""
saveDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Save Table")
saveDialog.setFileMode(QtGui.QFileDialog.AnyFile)
saveDialog.setNameFilter("Table Files (*.tbl);;All files (*)")
saveDialog.setDefaultSuffix("tbl")
saveDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
if saveDialog.exec_():
filename = saveDialog.selectedFiles()[0]
self._save_table_contents(filename)
def _load_table(self):
"""
Load a .tbl file from disk
"""
self.loading = True
loadDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Open Table")
loadDialog.setFileMode(QtGui.QFileDialog.ExistingFile)
loadDialog.setNameFilter("Table Files (*.tbl);;All files (*)")
if loadDialog.exec_():
try:
# before loading make sure you give them a chance to save
if self.mod_flag:
ret, _saved = self._save_check()
if ret == QtGui.QMessageBox.RejectRole:
# if they hit cancel abort the load
self.loading = False
return
self._reset_table()
filename = loadDialog.selectedFiles()[0]
self.current_table = filename
reader = csv.reader(open(filename, "rb"))
row = 0
for line in reader:
if row < 100:
for column in range(self.tableMain.columnCount() - 2):
item = QtGui.QTableWidgetItem()
item.setText(line[column])
self.tableMain.setItem(row, column, item)
row = row + 1
except:
logger.error('Could not load file: ' + str(filename) + '. File not found or unable to read from file.')
self.loading = False
self.mod_flag = False
def _reload_table(self):
"""
Reload the last loaded file from disk, replacing anything in the table already
"""
self.loading = True
filename = self.current_table
if filename:
if self.mod_flag:
msgBox = QtGui.QMessageBox()
msgBox.setText(
"The table has been modified. Are you sure you want to reload the table and lose your changes?")
msgBox.setStandardButtons(QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
msgBox.setIcon(QtGui.QMessageBox.Question)
msgBox.setDefaultButton(QtGui.QMessageBox.Yes)
msgBox.setEscapeButton(QtGui.QMessageBox.No)
ret = msgBox.exec_()
if ret == QtGui.QMessageBox.No:
# if they hit No abort the reload
self.loading = False
return
try:
self._reset_table()
reader = csv.reader(open(filename, "rb"))
row = 0
for line in reader:
if row < 100:
for column in range(self.tableMain.columnCount() - 2):
item = QtGui.QTableWidgetItem()
item.setText(line[column])
self.tableMain.setItem(row, column, item)
row = row + 1
self.mod_flag = False
except:
logger.error('Could not load file: ' + str(filename) + '. File not found or unable to read from file.')
else:
logger.notice('No file in table to reload.')
self.loading = False
def _save_workspaces(self):
"""
Shows the export dialog for saving workspaces to non mantid formats
"""
try:
Dialog = QtGui.QDialog()
u = Ui_SaveWindow()
u.setupUi(Dialog)
Dialog.exec_()
except Exception as ex:
logger.notice("Could not open save workspace dialog")
logger.notice(str(ex))
def _options_dialog(self):
"""
Shows the dialog for setting options regarding live data
"""
try:
dialog_controller = ReflOptions(def_method=self.live_method, def_freq=self.live_freq,
def_alg_use=self.__alg_use,
def_icat_download=self.__icat_download,
def_group_tof_workspaces=self.__group_tof_workspaces,
def_stitch_right=self.__scale_right)
if dialog_controller.exec_():
# Fetch the settings back off the controller
self.live_freq = dialog_controller.frequency()
self.live_method = dialog_controller.method()
self.__alg_use = dialog_controller.useAlg()
self.__icat_download = dialog_controller.icatDownload()
self.__group_tof_workspaces = dialog_controller.groupTOFWorkspaces()
self.__scale_right = dialog_controller.stitchRight()
# Persist the settings
settings = QtCore.QSettings()
settings.beginGroup(self.__live_data_settings)
settings.setValue(self.__live_data_frequency_key, self.live_freq)
settings.setValue(self.__live_data_method_key, self.live_method)
settings.endGroup()
settings.beginGroup(self.__generic_settings)
settings.setValue(self.__ads_use_key, self.__alg_use)
settings.setValue(self.__icat_download_key, self.__icat_download)
settings.setValue(self.__group_tof_workspaces_key, self.__group_tof_workspaces)
settings.setValue(self.__stitch_right_key, self.__scale_right)
settings.endGroup()
del settings
except Exception as ex:
logger.notice("Problem opening options dialog or problem retrieving values from dialog")
logger.notice(str(ex))
def _choose_columns(self):
"""
shows the choose columns dialog for hiding and revealing of columns
"""
try:
dialog = ReflChoose(self.shown_cols, self.tableMain)
if dialog.exec_():
settings = QtCore.QSettings()
settings.beginGroup(self.__column_settings)
for key, value in dialog.visiblestates.iteritems():
self.shown_cols[key] = value
settings.setValue(str(key), value)
if value:
self.tableMain.showColumn(key)
else:
self.tableMain.hideColumn(key)
settings.endGroup()
del settings
except Exception as ex:
logger.notice("Could not open choose columns dialog")
logger.notice(str(ex))
def _show_help(self):
"""
Launches the wiki page for this interface
"""
import webbrowser
webbrowser.open('http://www.mantidproject.org/ISIS_Reflectometry_GUI')
def getLogValue(wksp, field=''):
"""
returns the last value from a sample log
"""
ws = getWorkspace(wksp)
log = ws.getRun().getLogData(field).value
if isinstance(log, int) or isinstance(log, str):
return log
else:
return log[-1]
def getWorkspace(wksp, report_error=True):
"""
Gets the first workspace associated with the given string. Does not load.
"""
if isinstance(wksp, Workspace):
return wksp
elif isinstance(wksp, str):
exists = mtd.doesExist(wksp)
if not exists:
if report_error:
logger.error("Unable to get workspace: " + str(wksp))
return exists # Doesn't exist
else:
return exists # Doesn't exist
elif isinstance(mtd[wksp], WorkspaceGroup):
wout = mtd[wksp][0]
else:
wout = mtd[wksp]
return wout<|fim▁end|> | if self.mod_flag:
ret, _saved = self._save_check()
if ret == QtGui.QMessageBox.RejectRole: |
<|file_name|>VanChartHyperLinkPane.java<|end_file_name|><|fim▁begin|>package com.fr.plugin.chart.custom.component;
import com.fr.chart.chartattr.Plot;
import com.fr.chart.web.ChartHyperPoplink;
import com.fr.chart.web.ChartHyperRelateCellLink;
import com.fr.chart.web.ChartHyperRelateFloatLink;
import com.fr.design.ExtraDesignClassManager;
import com.fr.design.beans.BasicBeanPane;
import com.fr.design.chart.javascript.ChartEmailPane;
import com.fr.design.chart.series.SeriesCondition.impl.ChartHyperPoplinkPane;
import com.fr.design.chart.series.SeriesCondition.impl.ChartHyperRelateCellLinkPane;
import com.fr.design.chart.series.SeriesCondition.impl.ChartHyperRelateFloatLinkPane;
import com.fr.design.chart.series.SeriesCondition.impl.FormHyperlinkPane;
import com.fr.design.designer.TargetComponent;
import com.fr.design.fun.HyperlinkProvider;
import com.fr.design.gui.HyperlinkFilterHelper;
import com.fr.design.gui.controlpane.NameObjectCreator;
import com.fr.design.gui.controlpane.NameableCreator;
import com.fr.design.gui.imenutable.UIMenuNameableCreator;
import com.fr.design.hyperlink.ReportletHyperlinkPane;
import com.fr.design.hyperlink.WebHyperlinkPane;
import com.fr.design.javascript.JavaScriptImplPane;
import com.fr.design.javascript.ParameterJavaScriptPane;
import com.fr.design.module.DesignModuleFactory;
import com.fr.general.FRLogger;
import com.fr.general.Inter;
import com.fr.general.NameObject;
import com.fr.js.EmailJavaScript;
import com.fr.js.FormHyperlinkProvider;
import com.fr.js.JavaScript;
import com.fr.js.JavaScriptImpl;
import com.fr.js.NameJavaScript;
import com.fr.js.NameJavaScriptGroup;
import com.fr.js.ParameterJavaScript;
import com.fr.js.ReportletHyperlink;
import com.fr.js.WebHyperlink;
import com.fr.plugin.chart.designer.component.VanChartUIListControlPane;
import com.fr.stable.ListMap;
import com.fr.stable.Nameable;
import com.fr.stable.bridge.StableFactory;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* Created by Fangjie on 2016/4/28.
*/
public class VanChartHyperLinkPane extends VanChartUIListControlPane {
public VanChartHyperLinkPane() {
super();<|fim▁hole|> public NameableCreator[] createNameableCreators() {
// Pane initialization; must be refreshed again when populate is called
Map<String, NameableCreator> nameCreators = new ListMap<>();
NameableCreator[] creators = DesignModuleFactory.getHyperlinkGroupType().getHyperlinkCreators();
for (NameableCreator creator : creators) {
nameCreators.put(creator.menuName(), creator);
}
Set<HyperlinkProvider> providers = ExtraDesignClassManager.getInstance().getArray(HyperlinkProvider.XML_TAG);
for (HyperlinkProvider provider : providers) {
NameableCreator nc = provider.createHyperlinkCreator();
nameCreators.put(nc.menuName(), nc);
}
return nameCreators.values().toArray(new NameableCreator[nameCreators.size()]);
}
protected BasicBeanPane createPaneByCreators(NameableCreator creator) {
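// Prefer a pane constructor taking (hyperlink editor map, boolean flag); panes that lack one fall back to the superclass default below.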
Constructor<? extends BasicBeanPane> constructor = null;
try {
constructor = creator.getUpdatePane().getConstructor(HashMap.class, boolean.class);
return constructor.newInstance(plot.getHyperLinkEditorMap(), false);
} catch (InstantiationException e) {
FRLogger.getLogger().error(e.getMessage(), e);
} catch (IllegalAccessException e) {
FRLogger.getLogger().error(e.getMessage(), e);
} catch (NoSuchMethodException e) {
return super.createPaneByCreators(creator);
} catch (InvocationTargetException e) {
FRLogger.getLogger().error(e.getMessage(), e);
}
return null;
}
/**
* Title of the popup list.
*
* @return the title string.
*/
public String title4PopupWindow() {
return Inter.getLocText("FR-Designer_Hyperlink");
}
@Override
protected String getAddItemText() {
return Inter.getLocText("FR-Designer_Add_Hyperlink");
}
@Override
protected AddItemMenuDef getAddItemMenuDef(NameableCreator[] creators) {
return new AddVanChartItemMenuDef(creators);
}
public void populate(NameJavaScriptGroup nameHyperlink_array) {
java.util.List<NameObject> list = new ArrayList<NameObject>();
if (nameHyperlink_array != null) {
for (int i = 0; i < nameHyperlink_array.size(); i++) {
list.add(new NameObject(nameHyperlink_array.getNameHyperlink(i).getName(), nameHyperlink_array.getNameHyperlink(i).getJavaScript()));
}
}
this.populate(list.toArray(new NameObject[list.size()]));
}
public void populate(TargetComponent elementCasePane) {
// intentionally empty: charts have nothing to populate from an element case pane
}
/**
* Group of updated JavaScript hyperlinks.
*
* @return the resulting NameJavaScriptGroup
*/
public NameJavaScriptGroup updateJSGroup() {
Nameable[] res = this.update();
NameJavaScript[] res_array = new NameJavaScript[res.length];
for (int i = 0; i < res.length; i++) {
NameObject no = (NameObject) res[i];
res_array[i] = new NameJavaScript(no.getName(), (JavaScript) no.getObject());
}
return new NameJavaScriptGroup(res_array);
}
public void populate(Plot plot) {
this.plot = plot;
HashMap paneMap = getHyperlinkMap(plot);
// When plugins that open inside the platform are installed, add the corresponding buttons
Set<HyperlinkProvider> providers = ExtraDesignClassManager.getInstance().getArray(HyperlinkProvider.XML_TAG);
for (HyperlinkProvider provider : providers) {
NameableCreator nc = provider.createHyperlinkCreator();
paneMap.put(nc.getHyperlink(), nc.getUpdatePane());
}
java.util.List<UIMenuNameableCreator> list = refreshList(paneMap);
NameObjectCreator[] creators = new NameObjectCreator[list.size()];
for (int i = 0; i < list.size(); i++) {
UIMenuNameableCreator uiMenuNameableCreator = list.get(i);
creators[i] = new NameObjectCreator(uiMenuNameableCreator.getName(), uiMenuNameableCreator.getObj().getClass(), uiMenuNameableCreator.getPaneClazz());
}
refreshNameableCreator(creators);
java.util.List<NameObject> nameObjects = new ArrayList<NameObject>();
NameJavaScriptGroup nameGroup = populateHotHyperLink(plot);
for (int i = 0; nameGroup != null && i < nameGroup.size(); i++) {
NameJavaScript javaScript = nameGroup.getNameHyperlink(i);
if (javaScript != null && javaScript.getJavaScript() != null) {
JavaScript script = javaScript.getJavaScript();
UIMenuNameableCreator uiMenuNameableCreator = new UIMenuNameableCreator(javaScript.getName(), script, getUseMap(paneMap, script.getClass()));
nameObjects.add(new NameObject(uiMenuNameableCreator.getName(), uiMenuNameableCreator.getObj()));
}
}
this.populate(nameObjects.toArray(new NameObject[nameObjects.size()]));
doLayout();
}
protected NameJavaScriptGroup populateHotHyperLink(Plot plot) {
return plot.getHotHyperLink();
}
protected HashMap getHyperlinkMap(Plot plot) {
HashMap<Class, Class> map = new HashMap<Class, Class>();
map.put(ReportletHyperlink.class, ReportletHyperlinkPane.class);
map.put(EmailJavaScript.class, ChartEmailPane.class);
map.put(WebHyperlink.class, WebHyperlinkPane.class);
map.put(ParameterJavaScript.class, ParameterJavaScriptPane.class);
map.put(JavaScriptImpl.class, JavaScriptImplPane.class);
map.put(ChartHyperPoplink.class, ChartHyperPoplinkPane.class);
map.put(ChartHyperRelateCellLink.class, ChartHyperRelateCellLinkPane.class);
map.put(ChartHyperRelateFloatLink.class, ChartHyperRelateFloatLinkPane.class);
map.put(FormHyperlinkProvider.class, FormHyperlinkPane.class);
return map;
}
public void update(Plot plot) {
NameJavaScriptGroup nameGroup = updateNameGroup();
updateHotHyperLink(plot, nameGroup);
}
protected void updateHotHyperLink(Plot plot, NameJavaScriptGroup nameGroup) {
plot.setHotHyperLink(nameGroup);
}
private NameJavaScriptGroup updateNameGroup() {
Nameable[] nameables = update();
NameJavaScriptGroup nameGroup = new NameJavaScriptGroup();
nameGroup.clear();
for (int i = 0; i < nameables.length; i++) {
JavaScript javaScript = (JavaScript) ((NameObject) nameables[i]).getObject();
String name = nameables[i].getName();
NameJavaScript nameJava = new NameJavaScript(name, javaScript);
nameGroup.addNameHyperlink(nameJava);
}
return nameGroup;
}
protected java.util.List<UIMenuNameableCreator> refreshList(HashMap map) {
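// Build one menu entry per supported hyperlink type, pairing a localized label and a prototype object with its editor pane class.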
java.util.List<UIMenuNameableCreator> list = new ArrayList<UIMenuNameableCreator>();
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Reportlet"),
new ReportletHyperlink(), getUseMap(map, ReportletHyperlink.class)));
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Mail"), new EmailJavaScript(), VanChartEmailPane.class));
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Web"),
new WebHyperlink(), getUseMap(map, WebHyperlink.class)));
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Dynamic_Parameters"),
new ParameterJavaScript(), getUseMap(map, ParameterJavaScript.class)));
list.add(new UIMenuNameableCreator("JavaScript", new JavaScriptImpl(), getUseMap(map, JavaScriptImpl.class)));
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Float_Chart"),
new ChartHyperPoplink(), getUseMap(map, ChartHyperPoplink.class)));
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Cell"),
new ChartHyperRelateCellLink(), getUseMap(map, ChartHyperRelateCellLink.class)));
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Float"),
new ChartHyperRelateFloatLink(), getUseMap(map, ChartHyperRelateFloatLink.class)));
FormHyperlinkProvider hyperlink = StableFactory.getMarkedInstanceObjectFromClass(FormHyperlinkProvider.XML_TAG, FormHyperlinkProvider.class);
list.add(new UIMenuNameableCreator(Inter.getLocText("Chart-Link_Form"),
hyperlink, getUseMap(map, FormHyperlinkProvider.class)));
return list;
}
protected Class<? extends BasicBeanPane> getUseMap(HashMap map, Object key) {
if (map.get(key) != null) {
return (Class<? extends BasicBeanPane>) map.get(key);
}
// The engine places a provider here for the current form object
for (Object tempKey : map.keySet()) {
if (((Class) tempKey).isAssignableFrom((Class) key)) {
return (Class<? extends BasicBeanPane>) map.get(tempKey);
}
}
return null;
}
protected class AddVanChartItemMenuDef extends AddItemMenuDef {
public AddVanChartItemMenuDef(NameableCreator[] creators) {
super(creators);
}
@Override
protected boolean whetherAdd(String itemName) {
return HyperlinkFilterHelper.whetherAddHyperlink4Chart(itemName);
}
}
// Email hyperlink pane
public static class VanChartEmailPane extends ChartEmailPane {
@Override
protected boolean needRenamePane() {
return false;
}
}
}<|fim▁end|> | }
@Override |
<|file_name|>calc.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env kjscmd5
function Calculator(ui)
{
// Setup entry functions
var display = ui.findChild('display');
this.display = display;
this.one = function() { display.intValue = display.intValue*10+1; }
this.two = function() { display.intValue = display.intValue*10+2; }
this.three = function() { display.intValue = display.intValue*10+3; }
this.four = function() { display.intValue = display.intValue*10+4; }
this.five = function() { display.intValue = display.intValue*10+5; }
this.six = function() { display.intValue = display.intValue*10+6; }
this.seven = function() { display.intValue = display.intValue*10+7; }
this.eight = function() { display.intValue = display.intValue*10+8; }
this.nine = function() { display.intValue = display.intValue*10+9; }
this.zero = function() { display.intValue = display.intValue*10+0; }
ui.connect( ui.findChild('one'), 'clicked()', this, 'one()' );
ui.connect( ui.findChild('two'), 'clicked()', this, 'two()' );
ui.connect( ui.findChild('three'), 'clicked()', this, 'three()' );
ui.connect( ui.findChild('four'), 'clicked()', this, 'four()' );
ui.connect( ui.findChild('five'), 'clicked()', this, 'five()' );
ui.connect( ui.findChild('six'), 'clicked()', this, 'six()' );
ui.connect( ui.findChild('seven'), 'clicked()', this, 'seven()' );
ui.connect( ui.findChild('eight'), 'clicked()', this, 'eight()' );
ui.connect( ui.findChild('nine'), 'clicked()', this, 'nine()' );
ui.connect( ui.findChild('zero'), 'clicked()', this, 'zero()' );
this.val = 0;
this.display.intValue = 0;
this.lastop = function() {}
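// lastop records the pending arithmetic operation; pressing '=' replays it and shows the accumulator.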
this.plus = function()
{
this.val = display.intValue+this.val;
display.intValue = 0;
this.lastop=this.plus
}
this.minus = function()
{
this.val = display.intValue-this.val;
display.intValue = 0;
this.lastop=this.minus;
}
ui.connect( ui.findChild('plus'), 'clicked()', this, 'plus()' );
ui.connect( ui.findChild('minus'), 'clicked()', this, 'minus()' );<|fim▁hole|> this.clear = function() { this.lastop=function(){}; display.intValue = 0; this.val = 0; }
ui.connect( ui.findChild('equals'), 'clicked()', this, 'equals()' );
ui.connect( ui.findChild('clear'), 'clicked()', this, 'clear()' );
}
var loader = new QUiLoader();
var ui = loader.load('calc.ui', this);
var calc = new Calculator(ui);
ui.show();
exec();<|fim▁end|> |
this.equals = function() { this.lastop(); display.intValue = this.val; } |
<|file_name|>webdriver_handlers.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::conversions::FromJSValConvertible;
use dom::bindings::conversions::StringificationBehavior;
use dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, HTMLIFrameElementCast};
use dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::Bindings::NodeListBinding::NodeListMethods;
use dom::bindings::js::{OptionalRootable, Rootable, Temporary};
use dom::node::{Node, NodeHelpers};
use dom::window::{ScriptHelpers, WindowHelpers};
use dom::document::DocumentHelpers;
use js::jsapi::JSContext;
use js::jsval::JSVal;
use page::Page;
use msg::constellation_msg::{PipelineId, SubpageId};
use msg::webdriver_msg::{WebDriverJSValue, WebDriverJSError, WebDriverJSResult, WebDriverFrameId};
use script_task::get_page;
use std::rc::Rc;
use std::sync::mpsc::Sender;
fn find_node_by_unique_id(page: &Rc<Page>, pipeline: PipelineId, node_id: String) -> Option<Temporary<Node>> {
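    // Linear preorder walk of the whole document; O(n) per lookup, which is fine for test-sized DOMs.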
let page = get_page(&*page, pipeline);
let document = page.document().root();
let node = NodeCast::from_ref(document.r());
for candidate in node.traverse_preorder() {
if candidate.root().r().get_unique_id() == node_id {
return Some(candidate);
}
}
None
}
pub fn jsval_to_webdriver(cx: *mut JSContext, val: JSVal) -> WebDriverJSResult {
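    // Translate the raw JSVal into the WebDriver wire types; anything outside this primitive set is reported as UnknownType.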
if val.is_undefined() {
Ok(WebDriverJSValue::Undefined)
} else if val.is_boolean() {
Ok(WebDriverJSValue::Boolean(val.to_boolean()))
} else if val.is_double() {
Ok(WebDriverJSValue::Number(FromJSValConvertible::from_jsval(cx, val, ()).unwrap()))
} else if val.is_string() {
//FIXME: use jsstring_to_str when jsval grows to_jsstring
Ok(
WebDriverJSValue::String(
FromJSValConvertible::from_jsval(cx, val, StringificationBehavior::Default).unwrap()))
} else if val.is_null() {
Ok(WebDriverJSValue::Null)
} else {
Err(WebDriverJSError::UnknownType)
}
}
pub fn handle_execute_script(page: &Rc<Page>, pipeline: PipelineId, eval: String, reply: Sender<WebDriverJSResult>) {
let page = get_page(&*page, pipeline);
let window = page.window().root();
let cx = window.r().get_cx();
let rval = window.r().evaluate_js_on_global_with_result(&eval);
reply.send(jsval_to_webdriver(cx, rval)).unwrap();
}
pub fn handle_execute_async_script(page: &Rc<Page>, pipeline: PipelineId, eval: String,
reply: Sender<WebDriverJSResult>) {
let page = get_page(&*page, pipeline);
let window = page.window().root();
window.r().set_webdriver_script_chan(Some(reply));
window.r().evaluate_js_on_global_with_result(&eval);
}
pub fn handle_get_frame_id(page: &Rc<Page>,
pipeline: PipelineId,
webdriver_frame_id: WebDriverFrameId,
reply: Sender<Result<Option<(PipelineId, SubpageId)>, ()>>) {
let window = match webdriver_frame_id {
WebDriverFrameId::Short(_) => {
// This isn't supported yet
Ok(None)
},
WebDriverFrameId::Element(x) => {<|fim▁hole|> Some(ref elem) => Ok(elem.GetContentWindow()),
None => Err(())
}
},
None => Err(())
}
},
WebDriverFrameId::Parent => {
let window = page.window();
Ok(window.root().r().parent())
}
};
let frame_id = window.map(|x| x.and_then(|x| x.root().r().parent_info()));
reply.send(frame_id).unwrap()
}
pub fn handle_find_element_css(page: &Rc<Page>, _pipeline: PipelineId, selector: String,
reply: Sender<Result<Option<String>, ()>>) {
reply.send(match page.document().root().r().QuerySelector(selector.clone()) {
Ok(node) => {
let result = node.map(|x| NodeCast::from_ref(x.root().r()).get_unique_id());
Ok(result)
}
Err(_) => Err(())
}).unwrap();
}
pub fn handle_find_elements_css(page: &Rc<Page>, _pipeline: PipelineId, selector: String,
reply: Sender<Result<Vec<String>, ()>>) {
reply.send(match page.document().root().r().QuerySelectorAll(selector.clone()) {
Ok(ref node_list) => {
let nodes = node_list.root();
let mut result = Vec::with_capacity(nodes.r().Length() as usize);
for i in 0..nodes.r().Length() {
if let Some(ref node) = nodes.r().Item(i) {
result.push(node.root().r().get_unique_id());
}
}
Ok(result)
},
Err(_) => {
Err(())
}
}).unwrap();
}
pub fn handle_get_active_element(page: &Rc<Page>, _pipeline: PipelineId, reply: Sender<Option<String>>) {
reply.send(page.document().root().r().GetActiveElement().map(
|elem| NodeCast::from_ref(elem.root().r()).get_unique_id())).unwrap();
}
pub fn handle_get_title(page: &Rc<Page>, _pipeline: PipelineId, reply: Sender<String>) {
reply.send(page.document().root().r().Title()).unwrap();
}
pub fn handle_get_text(page: &Rc<Page>, pipeline: PipelineId, node_id: String, reply: Sender<Result<String, ()>>) {
reply.send(match find_node_by_unique_id(&*page, pipeline, node_id) {
Some(ref node) => {
Ok(node.root().r().GetTextContent().unwrap_or("".to_owned()))
},
None => Err(())
}).unwrap();
}
pub fn handle_get_name(page: &Rc<Page>, pipeline: PipelineId, node_id: String, reply: Sender<Result<String, ()>>) {
reply.send(match find_node_by_unique_id(&*page, pipeline, node_id) {
Some(tmp_node) => {
let node = tmp_node.root();
let element = ElementCast::to_ref(node.r()).unwrap();
Ok(element.TagName())
},
None => Err(())
}).unwrap();
}<|fim▁end|> | match find_node_by_unique_id(page, pipeline, x) {
Some(ref node) => {
match HTMLIFrameElementCast::to_ref(node.root().r()) { |
<|file_name|>histogram.cc<|end_file_name|><|fim▁begin|>/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright 2012-2019 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "histogram.h"
#include <string>
using namespace cbc;
using std::string;
void Histogram::install(lcb_INSTANCE *inst, FILE *out)
{
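    // Enable libcouchbase operation timings and borrow the instance's KV timings histogram handle.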
lcb_STATUS rc;
output = out;
lcb_enable_timings(inst);
rc = lcb_cntl(inst, LCB_CNTL_GET, LCB_CNTL_KVTIMINGS, &hg);
lcb_assert(rc == LCB_SUCCESS);
lcb_assert(hg != NULL);
(void)rc;
}
void Histogram::installStandalone(FILE *out)
{
if (hg != NULL) {
return;
}
hg = lcb_histogram_create();
output = out;
}
void Histogram::write()
{
if (hg == NULL) {
return;
}
lcb_histogram_print(hg, output);
}
void Histogram::record(lcb_U64 duration)
{
if (hg == NULL) {
return;
}
lcb_histogram_record(hg, duration);
}<|fim▁end|> | * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* |
<|file_name|>ComFixed.hpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010-2019 The ESPResSo project
* Copyright (C) 2002,2003,2004,2005,2006,2007,2008,2009,2010
* Max-Planck-Institute for Polymer Research, Theory Group
*
* This file is part of ESPResSo.
*
* ESPResSo is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ESPResSo is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|> * GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef SCRIPT_INTERFACE_COM_FIXED_HPP
#define SCRIPT_INTERFACE_COM_FIXED_HPP
#include "script_interface/ScriptInterface.hpp"
#include "core/comfixed_global.hpp"
namespace ScriptInterface {
class ComFixed : public AutoParameters<ComFixed> {
public:
ComFixed() {
add_parameters({{"types",
[](Variant const &v) {
comfixed.set_fixed_types(get_value<std::vector<int>>(v));
},
[]() { return comfixed.get_fixed_types(); }}});
}
};
} // namespace ScriptInterface
#endif<|fim▁end|> | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
<|file_name|>CookieJar.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012, 2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "CookieJar.h"
#include "CookiesStrategy.h"
#include "Document.h"
#include "Frame.h"<|fim▁hole|>#include "PlatformStrategies.h"
namespace WebCore {
static NetworkingContext* networkingContext(const Document* document)
{
// FIXME: Returning 0 means falling back to default context. That's not a choice that is appropriate to do at runtime
if (!document)
return nullptr;
Frame* frame = document->frame();
if (!frame)
return nullptr;
return frame->loader().networkingContext();
}
inline NetworkStorageSession& storageSession(const Document* document)
{
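    // Fall back to the default storage session when the document has no frame (and thus no networking context).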
NetworkingContext* context = networkingContext(document);
return context ? context->storageSession() : NetworkStorageSession::defaultStorageSession();
}
String cookies(const Document* document, const URL& url)
{
return platformStrategies()->cookiesStrategy()->cookiesForDOM(storageSession(document), document->firstPartyForCookies(), url);
}
void setCookies(Document* document, const URL& url, const String& cookieString)
{
platformStrategies()->cookiesStrategy()->setCookiesFromDOM(storageSession(document), document->firstPartyForCookies(), url, cookieString);
}
bool cookiesEnabled(const Document* document)
{
return platformStrategies()->cookiesStrategy()->cookiesEnabled(storageSession(document), document->firstPartyForCookies(), document->cookieURL());
}
String cookieRequestHeaderFieldValue(const Document* document, const URL& url)
{
return platformStrategies()->cookiesStrategy()->cookieRequestHeaderFieldValue(storageSession(document), document->firstPartyForCookies(), url);
}
bool getRawCookies(const Document* document, const URL& url, Vector<Cookie>& cookies)
{
return platformStrategies()->cookiesStrategy()->getRawCookies(storageSession(document), document->firstPartyForCookies(), url, cookies);
}
void deleteCookie(const Document* document, const URL& url, const String& cookieName)
{
platformStrategies()->cookiesStrategy()->deleteCookie(storageSession(document), url, cookieName);
}
void addCookie(const Document* document, const URL& url, const Cookie& cookie)
{
platformStrategies()->cookiesStrategy()->addCookie(storageSession(document), url, cookie);
}
}<|fim▁end|> | #include "FrameLoader.h"
#include "NetworkingContext.h"
#include "PlatformCookieJar.h" |
<|file_name|>associated-types-sugar-path.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test paths to associated types using the type-parameter-only sugar.
// pretty-expanded FIXME #23616
pub trait Foo {
type A;
fn boo(&self) -> Self::A;
}
impl Foo for isize {
type A = usize;
fn boo(&self) -> usize {
5<|fim▁hole|> }
}
// Using a type via a function.
pub fn bar<T: Foo>(a: T, x: T::A) -> T::A {
let _: T::A = a.boo();
x
}
// Using a type via an impl.
trait C {
fn f();
fn g(&self) { }
}
struct B<X>(X);
impl<T: Foo> C for B<T> {
fn f() {
let x: T::A = panic!();
}
}
pub fn main() {
let z: usize = bar(2, 4);
}<|fim▁end|> | |
<|file_name|>loggers.py<|end_file_name|><|fim▁begin|># *****************************************************************************
# conduct - CONvenient Construction Tool
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Alexander Lenz <[email protected]>
# Georg Brandl <[email protected]>
#
# *****************************************************************************
import os
import sys
import time
import linecache
import traceback
import logging
from os import path
from logging import Logger, Formatter, Handler, DEBUG, INFO, WARNING, ERROR
from conduct import colors
LOGFMT = '%(asctime)s : %(levelname)-7s : %(name)-25s: %(message)s'
DATEFMT = '%H:%M:%S'
DATESTAMP_FMT = '%Y-%m-%d'
SECONDS_PER_DAY = 60 * 60 * 24
LOGLEVELS = {'debug': DEBUG, 'info': INFO, 'warning': WARNING, 'error': ERROR}
INVLOGLEVELS = {value : key for key, value in LOGLEVELS.items()}
class ConductLogger(Logger):
maxLogNameLength = 0
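# Widest logger name seen so far; ConsoleFormatter uses it to align the name column.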
def __init__(self, *args, **kwargs):
Logger.__init__(self, *args, **kwargs)
ConductLogger._storeLoggerNameLength(self)
def getChild(self, suffix, ownDir=False):
child = Logger.getChild(self, suffix)
child.setLevel(self.getEffectiveLevel())
if ownDir:
for handler in self._collectHandlers():
if isinstance(handler, LogfileHandler):
handler = handler.getChild(suffix)
child.addHandler(handler)
child.propagate = False
return child
def _collectHandlers(self):
result = []
log = self
while log is not None:
result += log.handlers
log = log.parent
return result
@staticmethod
def _storeLoggerNameLength(logObj):
# store max logger name length for formatting
if len(logObj.name) > ConductLogger.maxLogNameLength:
ConductLogger.maxLogNameLength = len(logObj.name)
class ConsoleFormatter(Formatter):
"""
A lightweight formatter for the interactive console, with optional
colored output.
"""
def __init__(self, fmt=None, datefmt=None, colorize=None):
Formatter.__init__(self, fmt, datefmt)
if colorize:
self.colorize = colorize
else:
self.colorize = lambda c, s: s
def formatException(self, exc_info):
return traceback.format_exception_only(*exc_info[0:2])[-1]
def formatTime(self, record, datefmt=None):
return time.strftime(datefmt or DATEFMT,
self.converter(record.created))
def format(self, record):
record.message = record.getMessage()
levelno = record.levelno
datefmt = self.colorize('lightgray', '[%(asctime)s] ')
namefmt = '%(name)-' + str(ConductLogger.maxLogNameLength) + 's: '
if levelno <= DEBUG:
fmtstr = self.colorize('darkgray', '%s%%(message)s' % namefmt)
elif levelno <= INFO:
fmtstr = '%s%%(message)s' % namefmt
elif levelno <= WARNING:
fmtstr = self.colorize('fuchsia', '%s%%(levelname)s: %%(message)s'
% namefmt)
else:
# Add exception type to error (if caused by exception)
msgPrefix = ''
if record.exc_info:
msgPrefix = '%s: ' % record.exc_info[0].__name__
fmtstr = self.colorize('red', '%s%%(levelname)s: %s%%(message)s'
% (namefmt, msgPrefix))
fmtstr = datefmt + fmtstr
if not getattr(record, 'nonl', False):
fmtstr += '\n'
record.asctime = self.formatTime(record, self.datefmt)
s = fmtstr % record.__dict__
# never output more exception info -- the exception message is already
# part of the log message because of our special logger behavior
# if record.exc_info:
# # *not* caching exception text on the record, since it's
# # only a short version
# s += self.formatException(record.exc_info)
return s
def format_extended_frame(frame):
ret = []
for key, value in frame.f_locals.items():
try:
valstr = repr(value)[:256]
except Exception:
valstr = '<cannot be displayed>'
ret.append(' %-20s = %s\n' % (key, valstr))
ret.append('\n')
return ret
def format_extended_traceback(etype, value, tb):
ret = ['Traceback (most recent call last):\n']
while tb is not None:
frame = tb.tb_frame
filename = frame.f_code.co_filename
item = ' File "%s", line %d, in %s\n' % (filename, tb.tb_lineno,
frame.f_code.co_name)
linecache.checkcache(filename)
line = linecache.getline(filename, tb.tb_lineno, frame.f_globals)
if line:
item = item + ' %s\n' % line.strip()
ret.append(item)
if filename != '<script>':
ret += format_extended_frame(tb.tb_frame)
tb = tb.tb_next
ret += traceback.format_exception_only(etype, value)
return ''.join(ret).rstrip('\n')
class LogfileFormatter(Formatter):
"""
The standard Formatter does not support milliseconds with an explicit
datestamp format. It also doesn't show the full traceback for exceptions.
"""
extended_traceback = True
def formatException(self, ei):
if self.extended_traceback:
s = format_extended_traceback(*ei)
else:
s = ''.join(traceback.format_exception(ei[0], ei[1], ei[2],
sys.maxsize))
if s.endswith('\n'):
s = s[:-1]
return s
def formatTime(self, record, datefmt=None):
res = time.strftime(DATEFMT, self.converter(record.created))
res += ',%03d' % record.msecs
return res
class StreamHandler(Handler):
"""Reimplemented from logging: remove cruft, remove bare excepts."""
def __init__(self, stream=None):
Handler.__init__(self)
if stream is None:
stream = sys.stderr
self.stream = stream
def flush(self):
self.acquire()
try:
if self.stream and hasattr(self.stream, 'flush'):
self.stream.flush()
finally:
self.release()
def emit(self, record):
try:
msg = self.format(record)
try:
self.stream.write('%s\n' % msg)
except UnicodeEncodeError:
self.stream.write('%s\n' % msg.encode('utf-8'))
self.flush()
except Exception:
self.handleError(record)
class LogfileHandler(StreamHandler):
"""
Logs to log files with a date stamp appended, and rollover on midnight.
"""
def __init__(self, directory, filenameprefix, dayfmt=DATESTAMP_FMT):
self._directory = path.join(directory, filenameprefix)
if not path.isdir(self._directory):
os.makedirs(self._directory)
self._currentsymlink = path.join(self._directory, 'current')
self._filenameprefix = filenameprefix
self._pathnameprefix = path.join(self._directory, filenameprefix)
self._dayfmt = dayfmt
# today's logfile name
basefn = self._pathnameprefix + '-' + time.strftime(dayfmt) + '.log'
self.baseFilename = path.abspath(basefn)
self.mode = 'a'
StreamHandler.__init__(self, self._open())
# determine time of first midnight from now on
t = time.localtime()
self.rollover_at = time.mktime((t[0], t[1], t[2], 0, 0, 0,
t[6], t[7], t[8])) + SECONDS_PER_DAY
self.setFormatter(LogfileFormatter(LOGFMT, DATEFMT))
self.disabled = False
def getChild(self, name):
return LogfileHandler(self._directory, name)
def filter(self, record):
return not self.disabled
def emit(self, record):
try:
t = int(time.time())
if t >= self.rollover_at:
self.doRollover()
if self.stream is None:
self.stream = self._open()
StreamHandler.emit(self, record)
except Exception:
self.handleError(record)
def enable(self, enabled):
if enabled:
self.disabled = False
self.stream.close()
self.stream = self._open()
else:
self.disabled = True
def close(self):
self.acquire()
try:<|fim▁hole|> StreamHandler.close(self)
self.stream = None
finally:
self.release()
def doRollover(self):
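# Close the current day's file, point baseFilename at the new date stamp, and push the deadline forward 24 hours.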
self.stream.close()
self.baseFilename = self._pathnameprefix + '-' + \
time.strftime(self._dayfmt) + '.log'
self.stream = self._open()
self.rollover_at += SECONDS_PER_DAY
def _open(self):
# update 'current' symlink upon open
try:
os.remove(self._currentsymlink)
except OSError:
# if the symlink does not (yet) exist, OSError is raised.
# should happen at most once per installation....
pass
if hasattr(os, 'symlink'):
os.symlink(path.basename(self.baseFilename), self._currentsymlink)
# finally open the new logfile....
return open(self.baseFilename, self.mode)
class ColoredConsoleHandler(StreamHandler):
"""
A handler class that writes colorized records to standard output.
"""
def __init__(self):
StreamHandler.__init__(self, sys.stdout)
self.setFormatter(ConsoleFormatter(datefmt=DATEFMT,
colorize=colors.colorize))
def emit(self, record):
msg = self.format(record)
try:
self.stream.write(msg)
except UnicodeEncodeError:
self.stream.write(msg.encode('utf-8'))
self.stream.flush()<|fim▁end|> | if self.stream:
self.flush()
if hasattr(self.stream, 'close'):
self.stream.close() |
<|file_name|>test_root.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import StringIO
import allura
import json
import PIL
from nose.tools import assert_true, assert_equal, assert_in, assert_not_equal, assert_not_in
from ming.orm.ormsession import ThreadLocalORMSession
from mock import patch
from tg import config
from allura import model as M
from allura.lib import helpers as h
from allura.tests import decorators as td
from alluratest.controller import TestController
from forgewiki import model
class TestRootController(TestController):
def setUp(self):
super(TestRootController, self).setUp()
self.setup_with_tools()
@td.with_wiki
def setup_with_tools(self):
pass
def _find_edit_form(self, resp):
def cond(f):
return f.id == 'page_edit_form'
return self.find_form(resp, cond)
def test_root_index(self):
page_url = h.urlquote(u'/wiki/tést/')
r = self.app.get(page_url).follow()
assert u'tést' in r
assert 'Create Page' in r
# No 'Create Page' button if user doesn't have 'create' perm
r = self.app.get('/wiki/Home',
extra_environ=dict(username='*anonymous'))
assert 'Create Page' not in r, r
@td.with_wiki
def test_create_wiki_page(self):
url = u"/p/test/wiki/create_wiki_page/"
r = self.app.get(url)
assert u'test' in r
assert u'Create page' in r.body
def test_root_markdown_syntax(self):
response = self.app.get('/wiki/markdown_syntax/')
assert 'Markdown Syntax' in response
def test_root_browse_tags(self):
response = self.app.get('/wiki/browse_tags/')
assert 'Browse Labels' in response
def test_root_browse_pages(self):
response = self.app.get('/wiki/browse_pages/')
assert 'Browse Pages' in response
def test_root_new_page(self):
response = self.app.get('/wiki/new_page?title=' + h.urlquote(u'tést'))
assert u'tést' in response
def test_root_new_search(self):
self.app.get(h.urlquote(u'/wiki/tést/'))
response = self.app.get('/wiki/search/?q=' + h.urlquote(u'tést'))
assert u'Search wiki: tést' in response
def test_feed(self):
for ext in ['', '.rss', '.atom']:
self.app.get('/wiki/feed%s' % ext, status=200)
@patch('allura.lib.search.search')
def test_search(self, search):
r = self.app.get('/wiki/search/?q=test')
assert_in(
'<a href="/wiki/search/?q=test&sort=score+asc" class="strong">relevance</a>', r)
assert_in(
'<a href="/wiki/search/?q=test&sort=mod_date_dt+desc" class="">date</a>', r)
p = M.Project.query.get(shortname='test')
r = self.app.get('/wiki/search/?q=test&sort=score+asc')
solr_query = {
'short_timeout': True,
'ignore_errors': False,
'rows': 25,
'start': 0,
'qt': 'dismax',
'qf': 'title^2 text',
'pf': 'title^2 text',
'fq': [
'project_id_s:%s' % p._id,
'mount_point_s:wiki',
'-deleted_b:true',
'type_s:("WikiPage" OR "WikiPage Snapshot")',
'is_history_b:False',
],
'hl': 'true',
'hl.simple.pre': '#ALLURA-HIGHLIGHT-START#',
'hl.simple.post': '#ALLURA-HIGHLIGHT-END#',
'sort': 'score asc',
}
search.assert_called_with('test', **solr_query)
r = self.app.get(
'/wiki/search/?q=test&search_comments=on&history=on&sort=mod_date_dt+desc')
solr_query['fq'][
3] = 'type_s:("WikiPage" OR "WikiPage Snapshot" OR "Post")'
solr_query['fq'].remove('is_history_b:False')
solr_query['sort'] = 'mod_date_dt desc'
search.assert_called_with('test', **solr_query)
r = self.app.get('/wiki/search/?q=test&parser=standard')
solr_query['sort'] = 'score desc'
solr_query['fq'][3] = 'type_s:("WikiPage" OR "WikiPage Snapshot")'
solr_query['fq'].append('is_history_b:False')
solr_query.pop('qt')
solr_query.pop('qf')
solr_query.pop('pf')
search.assert_called_with('test', **solr_query)
def test_search_help(self):
r = self.app.get('/wiki/search/?q=test')
btn = r.html.find('a', attrs={'class': 'icon btn search_help_modal'})
assert btn is not None, "Can't find a help button"
div = r.html.find('div', attrs={'id': 'lightbox_search_help_modal'})
assert div is not None, "Can't find help text"
assert_in('To search for an exact phrase', div.text)
def test_nonexistent_page_edit(self):
resp = self.app.get('/wiki/tést/')
assert resp.location.endswith(h.urlquote(u'/wiki/tést/edit')), resp.location
resp = resp.follow()
assert 'tést' in resp
def test_nonexistent_page_noedit(self):
self.app.get('/wiki/tést/',
extra_environ=dict(username='*anonymous'),
status=404)
self.app.get('/wiki/tést/',
extra_environ=dict(username='test-user'),
status=404)
@patch('forgewiki.wiki_main.g.director.create_activity')
def test_activity(self, create_activity):
d = dict(title='foo', text='footext')
self.app.post('/wiki/foo/update', params=d)
assert create_activity.call_count == 1
assert create_activity.call_args[0][1] == 'created'
create_activity.reset_mock()
d = dict(title='foo', text='new footext')
self.app.post('/wiki/foo/update', params=d)
assert create_activity.call_count == 1
assert create_activity.call_args[0][1] == 'modified'
create_activity.reset_mock()
d = dict(title='new foo', text='footext')
self.app.post('/wiki/foo/update', params=d)
assert create_activity.call_count == 1
assert create_activity.call_args[0][1] == 'renamed'
def test_labels(self):
response = self.app.post(
'/wiki/foo-bar/update',
params={
'title': 'foo',
'text': 'sometext',
'labels': 'test label',
'viewable_by-0.id': 'all'}).follow()
assert_in('<a href="/p/test/wiki/search/?q=labels_t:%22test label%22&parser=standard">test label (1)</a>',
response)
def test_title_slashes(self):
# forward slash not allowed in wiki page title - converted to dash
response = self.app.post(
'/wiki/foo-bar/update',
params={
'title': 'foo/bar',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'}).follow()
assert 'foo-bar' in response
assert 'foo-bar' in response.request.url
def test_dotted_page_name(self):
r = self.app.post(
'/wiki/page.dot/update',
params={
'title': 'page.dot',
'text': 'text1',
'labels': '',
'viewable_by-0.id': 'all'}).follow()
assert 'page.dot' in r
def test_subpage_attempt(self):
self.app.get('/wiki/tést/')
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'text1',
'labels': '',
'viewable_by-0.id': 'all'})
assert '/p/test/wiki/Home/' in self.app.get('/wiki/tést/Home/')
self.app.get('/wiki/tést/notthere/', status=404)
def test_page_history(self):
self.app.get('/wiki/tést/')
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'text1',
'labels': '',
'viewable_by-0.id': 'all'})
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'text2',
'labels': '',
'viewable_by-0.id': 'all'})
response = self.app.get('/wiki/tést/history')
assert 'tést' in response
# two revisions are shown
assert '2 by Test Admin' in response
assert '1 by Test Admin' in response
# you can revert to an old revison, but not the current one
assert response.html.find('a', {'data-dialog-id': '1'}), response.html
assert not response.html.find('a', {'data-dialog-id': '2'})
response = self.app.get('/wiki/tést/history',
extra_environ=dict(username='*anonymous'))
# two revisions are shown
assert '2 by Test Admin' in response
assert '1 by Test Admin' in response
# you cannot revert to any revision
assert not response.html.find('a', {'data-dialog-id': '1'})
assert not response.html.find('a', {'data-dialog-id': '2'})
def test_page_diff(self):
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
self.app.post('/wiki/tést/revert', params=dict(version='1'))
response = self.app.get('/wiki/tést/diff?v1=0&v2=0')
assert 'tést' in response
d = dict(title='testdiff', text="""**Optionally**, you may also want to remove all the unused accounts that have accumulated (one was created for *every* logged in SF-user who has visited your MediaWiki hosted app):
~~~~~
php removeUnusedAccounts.php --delete
~~~~~
#### 6) Import image (and other) files into your Mediawiki install ####
Upload the backup of your data files to the project web.
~~~~~
scp projectname_mediawiki_files.tar.gz [email protected]:
~~~~~
In the project web shell, unpack the files to the images directory of your wiki installation. In the backup, the images are in a subfolder *projectname*, so follow these steps:
~~~~~
cd wiki
mkdir oldimages
cd oldimages
tar -xvzf ../../../projectname_mediawiki_files.tar.gz
mv projectname/* ../images/
cd ..
rm -r oldimages
# Now fix permissons. Wrong permissions may cause massive slowdown!
chown yournick:apache images/ --recursive
chmod 775 images/ --recursive
~~~~~
**TODO: FIXME:** The following can't be quite correct:
Now hit your wiki a few times from a browser. Initially, it will be dead slow, as it is trying to build thumbnails for the images. And it will time out, a lot. Keep hitting reload, until it works.
**Note:** The logo shown in the sidebar is no longer stored as an object in the wiki (as it was in the Hosted App installation). Rather save it as a regular file, then edit LocalSettings.php, adding""")
self.app.post('/wiki/testdiff/update', params=d)
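# The second revision below deliberately differs only in 'permissons' -> 'permissions' and an inserted 'a'; the diff assertions depend on these exact edits.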
d = dict(title='testdiff', text="""**Optionally**, you may also want to remove all the unused accounts that have accumulated (one was created for *every* logged in SF-user who has visited your MediaWiki hosted app):
~~~~~
php removeUnusedAccounts.php --delete
~~~~~
#### 6) Import image (and other) files into your Mediawiki install ####
Upload the backup of your data files to the project web.
~~~~~
scp projectname_mediawiki_files.tar.gz [email protected]:
~~~~~
In the project web shell, unpack the files to the images directory of your wiki installation. In the backup, the images are in a subfolder *projectname*, so follow these steps:
~~~~~
cd wiki
mkdir oldimages
cd oldimages
tar -xvzf ../../../projectname_mediawiki_files.tar.gz
mv projectname/* ../images/
cd ..
rm -r oldimages
# Now fix permissions. Wrong permissions may cause a massive slowdown!
chown yournick:apache images/ --recursive
chmod 775 images/ --recursive
~~~~~
**TODO: FIXME:** The following can't be quite correct:
Now hit your wiki a few times from a browser. Initially, it will be dead slow, as it is trying to build thumbnails for the images. And it will time out, a lot. Keep hitting reload, until it works.
**Note:** The logo shown in the sidebar is no longer stored as an object in the wiki (as it was in the Hosted App installation). Rather save it as a regular file, then edit LocalSettings.php, adding""")
self.app.post('/wiki/testdiff/update', params=d)
response = self.app.get('/wiki/testdiff/diff?v1=1&v2=2')
assert_in('# Now fix <del> permissons. </del> <ins> permissions. </ins> '
'Wrong permissions may cause <ins> a </ins> massive slowdown!',
response)
response = self.app.get('/wiki/testdiff/diff?v1=2&v2=1')
assert_in('# Now fix <del> permissions. </del> <ins> permissons. </ins> '
'Wrong permissions may cause <del> a </del> massive slowdown!',
response)
def test_page_raw(self):
self.app.post(
'/wiki/TEST/update',
params={
'title': 'TEST',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
response = self.app.get('/wiki/TEST/raw')
assert 'TEST' in response
def test_page_revert_no_text(self):
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': '',
'labels': '',
'viewable_by-0.id': 'all'})
response = self.app.post('/wiki/tést/revert', params=dict(version='1'))
assert '.' in response.json['location']
response = self.app.get('/wiki/tést/')
assert 'tést' in response
def test_page_revert_with_text(self):
self.app.get('/wiki/tést/')
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
response = self.app.post('/wiki/tést/revert', params=dict(version='1'))
assert '.' in response.json['location']
response = self.app.get('/wiki/tést/')
assert 'tést' in response
@patch('forgewiki.wiki_main.g.spam_checker')
def test_page_update(self, spam_checker):
self.app.get('/wiki/tést/')
response = self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
assert_equal(spam_checker.check.call_args[0][0], u'tést\nsometext')
assert 'tést' in response
def test_page_label_unlabel(self):
self.app.get('/wiki/tést/')
response = self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': 'yellow,green',
'viewable_by-0.id': 'all'})
assert 'tést' in response
response = self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': 'yellow',
'viewable_by-0.id': 'all'})
assert 'tést' in response
def test_page_label_count(self):
labels = "label"
for i in range(1, 100):
labels += ',label%s' % i
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': labels,
'viewable_by-0.id': 'all'})
r = self.app.get('/wiki/browse_tags/')
assert 'results of 100 ' in r
assert '<div class="page_list">' in r
assert '(Page 1 of 4)' in r
assert '<td>label30</td>' in r
assert '<td>label1</td>' in r
r = self.app.get('/wiki/browse_tags/?page=3')
assert '<td>label77</td>' in r
assert '<td>label99</td>' in r
def test_new_attachment(self):
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
content = file(__file__).read()
self.app.post('/wiki/tést/attach',
upload_files=[('file_info', 'test_root.py', content)])
response = self.app.get('/wiki/tést/')
assert 'test_root.py' in response
def test_attach_two_files(self):
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
content = file(__file__).read()
self.app.post('/wiki/tést/attach',
upload_files=[('file_info', 'test1.py', content), ('file_info', 'test2.py', content)])
response = self.app.get('/wiki/tést/')
assert 'test1.py' in response
assert 'test2.py' in response
def test_new_text_attachment_content(self):
self.app.post(
'/wiki/tést/update',
params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
file_name = 'test_root.py'
file_data = file(__file__).read()
upload = ('file_info', file_name, file_data)
self.app.post('/wiki/tést/attach', upload_files=[upload])
page_editor = self.app.get('/wiki/tést/edit')
download = page_editor.click(description=file_name)
assert_true(download.body == file_data)
def test_new_image_attachment_content(self):
self.app.post('/wiki/TEST/update', params={
'title': 'TEST',<|fim▁hole|> 'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
file_name = 'neo-icon-set-454545-256x350.png'
file_path = os.path.join(
allura.__path__[0], 'nf', 'allura', 'images', file_name)
file_data = file(file_path).read()
upload = ('file_info', file_name, file_data)
self.app.post('/wiki/TEST/attach', upload_files=[upload])
h.set_context('test', 'wiki', neighborhood='Projects')
page = model.Page.query.find(dict(title='TEST')).first()
filename = page.attachments[0].filename
uploaded = PIL.Image.open(file_path)
r = self.app.get('/wiki/TEST/attachment/' + filename)
downloaded = PIL.Image.open(StringIO.StringIO(r.body))
assert uploaded.size == downloaded.size
r = self.app.get('/wiki/TEST/attachment/' + filename + '/thumb')
thumbnail = PIL.Image.open(StringIO.StringIO(r.body))
assert thumbnail.size == (255, 255)
# Make sure thumbnail is absent
r = self.app.get('/wiki/TEST/')
img_srcs = [i['src'] for i in r.html.findAll('img')]
assert ('/p/test/wiki/TEST/attachment/' +
filename) not in img_srcs, img_srcs
def test_sidebar_static_page(self):
response = self.app.get('/wiki/tést/')
assert 'Edit this page' not in response
assert 'Related Pages' not in response
def test_related_links(self):
response = self.app.get('/wiki/TEST/').follow()
assert 'Edit TEST' in response
assert 'Related' not in response
self.app.post('/wiki/TEST/update', params={
'title': 'TEST',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
self.app.post('/wiki/aaa/update', params={
'title': 'aaa',
'text': '',
'labels': '',
'viewable_by-0.id': 'all'})
self.app.post('/wiki/bbb/update', params={
'title': 'bbb',
'text': '',
'labels': '',
'viewable_by-0.id': 'all'})
h.set_context('test', 'wiki', neighborhood='Projects')
a = model.Page.query.find(dict(title='aaa')).first()
a.text = '\n[TEST]\n'
b = model.Page.query.find(dict(title='TEST')).first()
b.text = '\n[bbb]\n'
ThreadLocalORMSession.flush_all()
M.MonQTask.run_ready()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
response = self.app.get('/wiki/TEST/')
assert 'Related' in response
assert 'aaa' in response
assert 'bbb' in response
def test_show_discussion(self):
self.app.post('/wiki/tést/update', params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
wiki_page = self.app.get('/wiki/tést/')
assert wiki_page.html.find('div', {'id': 'new_post_holder'})
options_admin = self.app.get(
'/admin/wiki/options', validate_chunk=True)
assert options_admin.form['show_discussion'].checked
options_admin.form['show_discussion'].checked = False
options_admin.form.submit()
options_admin2 = self.app.get(
'/admin/wiki/options', validate_chunk=True)
assert not options_admin2.form['show_discussion'].checked
wiki_page2 = self.app.get('/wiki/tést/')
assert not wiki_page2.html.find('div', {'id': 'new_post_holder'})
def test_show_left_bar(self):
self.app.post('/wiki/tést/update', params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
wiki_page = self.app.get('/wiki/tést/')
assert wiki_page.html.find('ul', {'class': 'sidebarmenu'})
options_admin = self.app.get(
'/admin/wiki/options', validate_chunk=True)
assert options_admin.form['show_left_bar'].checked
options_admin.form['show_left_bar'].checked = False
options_admin.form.submit()
options_admin2 = self.app.get(
'/admin/wiki/options', validate_chunk=True)
assert not options_admin2.form['show_left_bar'].checked
wiki_page2 = self.app.get(
'/wiki/tést/', extra_environ=dict(username='*anonymous'))
assert not wiki_page2.html.find('ul', {'class': 'sidebarmenu'})
wiki_page3 = self.app.get('/wiki/tést/')
assert not wiki_page3.html.find('ul', {'class': 'sidebarmenu'})
def test_show_metadata(self):
self.app.post('/wiki/tést/update', params={
'title': 'tést',
'text': 'sometext',
'labels': '',
'viewable_by-0.id': 'all'})
wiki_page = self.app.get('/wiki/tést/')
assert wiki_page.html.find('div', {'class': 'editbox'})
options_admin = self.app.get(
'/admin/wiki/options', validate_chunk=True)
assert options_admin.form['show_right_bar'].checked
options_admin.form['show_right_bar'].checked = False
options_admin.form.submit()
options_admin2 = self.app.get(
'/admin/wiki/options', validate_chunk=True)
assert not options_admin2.form['show_right_bar'].checked
wiki_page2 = self.app.get('/wiki/tést/')
assert not wiki_page2.html.find('div', {'class': 'editbox'})
def test_edit_mount_label(self):
r = self.app.get('/admin/wiki/edit_label', validate_chunk=True)
assert r.form['mount_label'].value == 'Wiki'
r = self.app.post('/admin/wiki/update_label', params=dict(
mount_label='Tricky Wiki'))
assert M.MonQTask.query.find({
'task_name': 'allura.tasks.event_tasks.event',
'args': 'project_menu_updated'
}).all()
r = self.app.get('/admin/wiki/edit_label', validate_chunk=True)
assert r.form['mount_label'].value == 'Tricky Wiki'
def test_page_links_are_colored(self):
self.app.get('/wiki/space%20page/')
params = {
'title': 'space page',
'text': '''There is a space in the title!''',
'labels': '',
'viewable_by-0.id': 'all'}
self.app.post('/wiki/space%20page/update', params=params)
self.app.get('/wiki/TEST/')
params = {
'title': 'TEST',
'text': '''
* Here is a link to [this page](TEST)
* Here is a link to [another page](Some page which does not exist)
* Here is a link to [space page space](space page)
* Here is a link to [space page escape](space%20page)
* Here is a link to [TEST]
* Here is a link to [Some page which does not exist]
* Here is a link to [space page]
* Here is a link to [space%20page]
* Here is a link to [another attach](TEST/attachment/attach.txt)
* Here is a link to [attach](TEST/attachment/test_root.py)
''',
'labels': '',
'viewable_by-0.id': 'all'}
self.app.post('/wiki/TEST/update', params=params)
content = file(__file__).read()
self.app.post('/wiki/TEST/attach',
upload_files=[('file_info', 'test_root.py', content)])
r = self.app.get('/wiki/TEST/')
found_links = 0
for link in r.html.findAll('a'):
if link.contents == ['this page']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['another page']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['space page space']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['space page escape']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['[TEST]']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['[Some page which does not exist]']:
assert 'notfound' in link.get('class', '')
found_links += 1
if link.contents == ['[space page]']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['[space%20page]']:
assert 'notfound' not in link.get('class', '')
found_links += 1
if link.contents == ['another attach']:
assert 'notfound' in link.get('class', '')
found_links += 1
if link.contents == ['attach']:
assert 'notfound' not in link.get('class', '')
found_links += 1
assert found_links == 10, 'Wrong number of links found'
def test_home_rename(self):
assert 'The resource was found at http://localhost/p/test/wiki/Home/;' in self.app.get(
'/p/test/wiki/')
req = self.app.get('/p/test/wiki/Home/edit')
form = self._find_edit_form(req)
form['title'].value = 'new_title'
form.submit()
assert 'The resource was found at http://localhost/p/test/wiki/new_title/;' in self.app.get(
'/p/test/wiki/')
@patch.dict('allura.lib.app_globals.config', markdown_cache_threshold='0')
def test_cached_html(self):
"""Ensure cached html is not escaped."""
html = '<p><span>My Html</span></p>'
self.app.post('/wiki/cache/update', params={
'title': 'cache',
'text': html,
'labels': '',
'viewable_by-0.id': 'all'})
# first request caches html, second serves from cache
r = self.app.get('/wiki/cache/')
r = self.app.get('/wiki/cache/')
assert_true(html in r)
def test_page_delete(self):
self.app.post('/wiki/aaa/update', params={
'title': 'aaa',
'text': '111',
'labels': '',
'viewable_by-0.id': 'all'})
self.app.post('/wiki/bbb/update', params={
'title': 'bbb',
'text': '222',
'labels': '',
'viewable_by-0.id': 'all'})
response = self.app.get('/wiki/browse_pages/')
assert 'aaa' in response
assert 'bbb' in response
self.app.post('/wiki/bbb/delete')
response = self.app.get('/wiki/browse_pages/')
assert 'aaa' in response
assert '?deleted=True">bbb' in response
n = M.Notification.query.get(subject="[test:wiki] test-admin removed page bbb")
assert '222' in n.text
def test_mailto_links(self):
self.app.get('/wiki/test_mailto/')
params = {
'title': 'test_mailto',
'text': '''
* Automatic mailto #1 <[email protected]>
* Automatic mailto #2 <mailto:[email protected]>
* Handmade mailto <a href="mailto:[email protected]">Email Yoda</a>
''',
'labels': '',
'viewable_by-0.id': 'all'}
self.app.post('/wiki/test_mailto/update', params=params)
r = self.app.get('/wiki/test_mailto/')
mailto_links = 0
for link in r.html.findAll('a'):
if link.get('href') == 'mailto:[email protected]':
assert 'notfound' not in link.get('class', '')
mailto_links += 1
if link.get('href') == 'mailto:[email protected]':
assert 'notfound' not in link.get('class', '')
mailto_links += 1
if link.get('href') == 'mailto:[email protected]':
assert link.contents == ['Email Yoda']
assert 'notfound' not in link.get('class', '')
mailto_links += 1
assert mailto_links == 3, 'Wrong number of mailto links'
def test_user_browse_page(self):
r = self.app.get('/wiki/browse_pages/')
assert '<td>Test Admin (test-admin)</td>' in r
def test_subscribe(self):
user = M.User.query.get(username='test-user')
# user is not subscribed
assert not M.Mailbox.subscribed(user_id=user._id)
r = self.app.get('/p/test/wiki/Home/', extra_environ={'username': str(user.username)})
sidebar_menu = r.html.find('div', attrs={'id': 'sidebar'})
assert 'Subscribe to wiki' in str(sidebar_menu)
# subscribe
self.app.post('/p/test/wiki/subscribe', {'subscribe': True},
extra_environ={'username': str(user.username)}).follow()
# user is subscribed
assert M.Mailbox.subscribed(user_id=user._id)
r = self.app.get('/p/test/wiki/Home/', extra_environ={'username': str(user.username)})
sidebar_menu = r.html.find('div', attrs={'id': 'sidebar'})
assert 'Unsubscribe' in str(sidebar_menu)
# unsubscribe
self.app.post('/p/test/wiki/subscribe', {'unsubscribe': True},
extra_environ={'username': str(user.username)}).follow()
# user is not subscribed
assert not M.Mailbox.subscribed(user_id=user._id)
r = self.app.get('/p/test/wiki/Home/', extra_environ={'username': str(user.username)})
sidebar_menu = r.html.find('div', attrs={'id': 'sidebar'})
assert 'Subscribe to wiki' in str(sidebar_menu)
def test_rate_limit_new_page(self):
        # Set rate limit to unlimited
with h.push_config(config, **{'forgewiki.rate_limits': '{}'}):
r = self.app.get('/p/test/wiki/new-page-title/')
assert_equal(r.status_int, 302)
assert_equal(
r.location,
'http://localhost/p/test/wiki/new-page-title/edit')
assert_equal(self.webflash(r), '')
# Set rate limit to 1 in first hour of project
with h.push_config(config, **{'forgewiki.rate_limits': '{"3600": 1}'}):
r = self.app.get('/p/test/wiki/new-page-title/')
assert_equal(r.status_int, 302)
assert_equal(r.location, 'http://localhost/p/test/wiki/')
wf = json.loads(self.webflash(r))
assert_equal(wf['status'], 'error')
assert_equal(
wf['message'],
'Page create/edit rate limit exceeded. Please try again later.')
def test_rate_limit_update(self):
        # Set rate limit to unlimited
with h.push_config(config, **{'forgewiki.rate_limits': '{}'}):
r = self.app.post(
'/p/test/wiki/page1/update',
dict(text='Some text', title='page1')).follow()
assert_in('Some text', r)
p = model.Page.query.get(title='page1')
assert_not_equal(p, None)
# Set rate limit to 1 in first hour of project
with h.push_config(config, **{'forgewiki.rate_limits': '{"3600": 1}'}):
r = self.app.post(
'/p/test/wiki/page2/update',
dict(text='Some text', title='page2'))
assert_equal(r.status_int, 302)
assert_equal(r.location, 'http://localhost/p/test/wiki/')
wf = json.loads(self.webflash(r))
assert_equal(wf['status'], 'error')
assert_equal(
wf['message'],
'Page create/edit rate limit exceeded. Please try again later.')
p = model.Page.query.get(title='page2')
assert_equal(p, None)
def test_rate_limit_by_user(self):
        # also test that multiple edits to a page count as one page towards the limit
# test/wiki/Home and test/sub1/wiki already were created by this user
# and proactively get the user-project wiki created (otherwise it'll be created during the subsequent edits)
self.app.get('/u/test-admin/wiki/')
with h.push_config(config, **{'forgewiki.rate_limits_per_user': '{"3600": 5}'}):
r = self.app.post('/p/test/wiki/page123/update', # page 4 (remember, 3 other projects' wiki pages)
dict(text='Starting a new page, ok', title='page123'))
assert_equal(self.webflash(r), '')
r = self.app.post('/p/test/wiki/page123/update',
dict(text='Editing some', title='page123'))
assert_equal(self.webflash(r), '')
r = self.app.post('/p/test/wiki/page123/update',
dict(text='Still editing', title='page123'))
assert_equal(self.webflash(r), '')
r = self.app.post('/p/test/wiki/pageABC/update', # page 5
dict(text='Another new page', title='pageABC'))
assert_equal(self.webflash(r), '')
r = self.app.post('/p/test/wiki/pageZZZZZ/update', # page 6
dict(text='This new page hits the limit', title='pageZZZZZ'))
wf = json.loads(self.webflash(r))
assert_equal(wf['status'], 'error')
assert_equal(wf['message'], 'Page create/edit rate limit exceeded. Please try again later.')
def test_sidebar_admin_menu(self):
r = self.app.get('/p/test/wiki/Home/')
menu = r.html.find('div', {'id': 'sidebar-admin-menu'})
assert_equal(menu.attrMap['class'], 'hidden') # (not expanded)
menu = [li.find('span').getText() for li in menu.findAll('li')]
assert_equal(
menu,
['Set Home', 'Permissions', 'Options', 'Rename', 'Delete Everything'])
def test_sidebar_admin_menu_is_expanded(self):
r = self.app.get('/p/test/admin/wiki/permissions')
menu = r.html.find('div', {'id': 'sidebar-admin-menu'})
assert_not_in('hidden', menu.attrMap.get('class', '')) # expanded
def test_sidebar_admin_menu_invisible_to_not_admin(self):
def assert_invisible_for(username):
env = {'username': username}
r = self.app.get('/p/test/wiki/Home/', extra_environ=env)
menu = r.html.find('div', {'id': 'sidebar-admin-menu'})
assert_equal(menu, None)
assert_invisible_for('*anonymous')
assert_invisible_for('test-user')<|fim▁end|> | |
<|file_name|>pdf.rs<|end_file_name|><|fim▁begin|>// Copyright 2018-2019, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <https://opensource.org/licenses/MIT>
use std::convert::TryFrom;
use std::ffi::{CStr, CString};
use std::fmt;
use std::io;
use std::mem;
use std::ops::Deref;
use std::path::Path;
use std::ptr;
#[cfg(any(all(feature = "pdf", feature = "v1_16"), feature = "dox"))]
use enums::{PdfMetadata, PdfOutline};
use enums::{PdfVersion, SurfaceType};
use error::Error;
use ffi;
use surface::Surface;
use utils::status_to_result;
#[cfg(feature = "use_glib")]
use glib::translate::*;
impl PdfVersion {
pub fn as_str(self) -> Option<&'static str> {
unsafe {
let res = ffi::cairo_pdf_version_to_string(self.into());
res.as_ref()
.and_then(|cstr| CStr::from_ptr(cstr as _).to_str().ok())
}
}
}
declare_surface!(PdfSurface, SurfaceType::Pdf);
impl PdfSurface {
pub fn new<P: AsRef<Path>>(width: f64, height: f64, path: P) -> Result<Self, Error> {
let path = path.as_ref().to_string_lossy().into_owned();
let path = CString::new(path).unwrap();
unsafe { Self::from_raw_full(ffi::cairo_pdf_surface_create(path.as_ptr(), width, height)) }
}
for_stream_constructors!(cairo_pdf_surface_create_for_stream);
pub fn get_versions() -> impl Iterator<Item = PdfVersion> {
let vers_slice = unsafe {
let mut vers_ptr = ptr::null_mut();
let mut num_vers = mem::MaybeUninit::uninit();
ffi::cairo_pdf_get_versions(&mut vers_ptr, num_vers.as_mut_ptr());
std::slice::from_raw_parts(vers_ptr, num_vers.assume_init() as _)
};
vers_slice.iter().map(|v| PdfVersion::from(*v))
}
pub fn restrict(&self, version: PdfVersion) -> Result<(), Error> {
unsafe {
ffi::cairo_pdf_surface_restrict_to_version(self.0.to_raw_none(), version.into());
}
self.status()
}
pub fn set_size(&self, width: f64, height: f64) -> Result<(), Error> {
unsafe {
ffi::cairo_pdf_surface_set_size(self.0.to_raw_none(), width, height);
}
self.status()
}
#[cfg(any(all(feature = "pdf", feature = "v1_16"), feature = "dox"))]
pub fn set_metadata(&self, metadata: PdfMetadata, value: &str) -> Result<(), Error> {
let value = CString::new(value).unwrap();
unsafe {
ffi::cairo_pdf_surface_set_metadata(
self.0.to_raw_none(),
metadata.into(),
value.as_ptr(),
);
}
self.status()
}
#[cfg(any(all(feature = "pdf", feature = "v1_16"), feature = "dox"))]
pub fn set_page_label(&self, label: &str) -> Result<(), Error> {
let label = CString::new(label).unwrap();
unsafe {
ffi::cairo_pdf_surface_set_page_label(self.0.to_raw_none(), label.as_ptr());
}
self.status()
}
#[cfg(any(all(feature = "pdf", feature = "v1_16"), feature = "dox"))]
pub fn set_thumbnail_size(&self, width: i32, height: i32) -> Result<(), Error> {
unsafe {
ffi::cairo_pdf_surface_set_thumbnail_size(
self.0.to_raw_none(),
width as _,
height as _,
);
}
self.status()
}
<|fim▁hole|> #[cfg(any(all(feature = "pdf", feature = "v1_16"), feature = "dox"))]
pub fn add_outline(
&self,
parent_id: i32,
name: &str,
link_attribs: &str,
flags: PdfOutline,
) -> Result<i32, Error> {
let name = CString::new(name).unwrap();
let link_attribs = CString::new(link_attribs).unwrap();
let res = unsafe {
ffi::cairo_pdf_surface_add_outline(
self.0.to_raw_none(),
parent_id,
name.as_ptr(),
link_attribs.as_ptr(),
flags.bits() as _,
) as _
};
self.status()?;
Ok(res)
}
fn status(&self) -> Result<(), Error> {
let status = unsafe { ffi::cairo_surface_status(self.to_raw_none()) };
status_to_result(status)
}
}
#[cfg(test)]
mod test {
use super::*;
use context::*;
use tempfile::tempfile;
fn draw(surface: &Surface) {
let cr = Context::new(surface);
cr.set_line_width(25.0);
cr.set_source_rgba(1.0, 0.0, 0.0, 0.5);
cr.line_to(0., 0.);
cr.line_to(100., 100.);
cr.stroke();
cr.set_source_rgba(0.0, 0.0, 1.0, 0.5);
cr.line_to(0., 100.);
cr.line_to(100., 0.);
cr.stroke();
}
fn draw_in_buffer() -> Vec<u8> {
let buffer: Vec<u8> = vec![];
let surface = PdfSurface::for_stream(100., 100., buffer).unwrap();
draw(&surface);
*surface.finish_output_stream().unwrap().downcast().unwrap()
}
#[test]
fn versions() {
assert!(PdfSurface::get_versions().any(|v| v == PdfVersion::_1_4));
}
#[test]
fn version_string() {
let ver_str = PdfVersion::_1_4.as_str().unwrap();
assert_eq!(ver_str, "PDF 1.4");
}
#[test]
#[cfg(unix)]
fn file() {
let surface = PdfSurface::new(100., 100., "/dev/null").unwrap();
draw(&surface);
surface.finish();
}
#[test]
fn writer() {
let file = tempfile().expect("tempfile failed");
let surface = PdfSurface::for_stream(100., 100., file).unwrap();
draw(&surface);
let stream = surface.finish_output_stream().unwrap();
let file = stream.downcast::<std::fs::File>().unwrap();
let buffer = draw_in_buffer();
let file_size = file.metadata().unwrap().len();
assert_eq!(file_size, buffer.len() as u64);
}
#[test]
fn ref_writer() {
let mut file = tempfile().expect("tempfile failed");
let surface = unsafe { PdfSurface::for_raw_stream(100., 100., &mut file).unwrap() };
draw(&surface);
surface.finish_output_stream().unwrap();
drop(file);
}
#[test]
fn buffer() {
let buffer = draw_in_buffer();
let header = b"%PDF-1.5";
assert_eq!(&buffer[..header.len()], header);
}
#[test]
fn custom_writer() {
struct CustomWriter(usize);
impl io::Write for CustomWriter {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.0 += buf.len();
Ok(buf.len())
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
let custom_writer = CustomWriter(0);
let surface = PdfSurface::for_stream(20., 20., custom_writer).unwrap();
surface.set_size(100., 100.).unwrap();
draw(&surface);
let stream = surface.finish_output_stream().unwrap();
let custom_writer = stream.downcast::<CustomWriter>().unwrap();
let buffer = draw_in_buffer();
assert_eq!(custom_writer.0, buffer.len());
}
fn with_panicky_stream() -> PdfSurface {
struct PanicWriter;
impl io::Write for PanicWriter {
fn write(&mut self, _buf: &[u8]) -> io::Result<usize> {
panic!("panic in writer");
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
let surface = PdfSurface::for_stream(20., 20., PanicWriter).unwrap();
surface.finish();
surface
}
#[test]
#[should_panic]
fn finish_stream_propagates_panic() {
let _ = with_panicky_stream().finish_output_stream();
}
}<|fim▁end|> | |
<|file_name|>ehci.rs<|end_file_name|><|fim▁begin|>use core::ptr::{read, write};
use common::debug;
use drivers::pciconfig::PciConfig;
use schemes::KScheme;
#[repr(packed)]
struct Setup {
request_type: u8,
request: u8,
value: u16,
index: u16,
len: u16,
}
#[repr(packed)]
struct Qtd {
next: u32,
next_alt: u32,
token: u32,
buffers: [u32; 5],
}
#[repr(packed)]
struct QueueHead {
next: u32,
characteristics: u32,
capabilities: u32,
qtd_ptr: u32,
qtd: Qtd,
}
pub struct Ehci {
pub pci: PciConfig,
pub base: usize,
pub memory_mapped: bool,
pub irq: u8,
}
impl KScheme for Ehci {
#[allow(non_snake_case)]
fn on_irq(&mut self, irq: u8) {
if irq == self.irq {
// debug::d("EHCI handle");
unsafe {
let CAPLENGTH = self.base as *mut u8;
let opbase = self.base + read(CAPLENGTH) as usize;
let USBSTS = (opbase + 4) as *mut u32;
// debug::d(" USBSTS ");
// debug::dh(*USBSTS as usize);
write(USBSTS, 0b111111);
// debug::d(" USBSTS ");
// debug::dh(*USBSTS as usize);
// let FRINDEX = (opbase + 0xC) as *mut u32;
// debug::d(" FRINDEX ");
// debug::dh(*FRINDEX as usize);
}
// debug::dl();
}
}
}
impl Ehci {
#[allow(non_snake_case)]
pub unsafe fn init(&mut self) {
debug::d("EHCI on: ");
debug::dh(self.base);
if self.memory_mapped {
debug::d(" memory mapped");
} else {
debug::d(" port mapped");
}
debug::d(" IRQ: ");
debug::dbh(self.irq);
debug::dl();
return;
//
//
// let pci = &mut self.pci;
//
// pci.flag(4, 4, true); // Bus master
//
// let CAPLENGTH = self.base as *mut u8;
// let HCSPARAMS = (self.base + 4) as *mut u32;
// let HCCPARAMS = (self.base + 8) as *mut u32;
//
// debug::d(" CAPLENGTH ");
// debug::dd(read(CAPLENGTH) as usize);
//
// debug::d(" HCSPARAMS ");
// debug::dh(read(HCSPARAMS) as usize);
//
// debug::d(" HCCPARAMS ");
// debug::dh(read(HCCPARAMS) as usize);
//
// let ports = (read(HCSPARAMS) & 0b1111) as usize;
// debug::d(" PORTS ");
// debug::dd(ports);
//
// let eecp = ((read(HCCPARAMS) >> 8) & 0xFF) as u8;
// debug::d(" EECP ");
// debug::dh(eecp as usize);
//
// debug::dl();
//<|fim▁hole|> // debug::dh(pci.read(eecp) as usize);
//
// pci.flag(eecp, 1 << 24, true);
//
// debug::d(" ");
// debug::dh(pci.read(eecp) as usize);
// debug::dl();
//
// debug::d("Waiting");
// debug::d(" ");
// debug::dh(pci.read(eecp) as usize);
//
// loop {
// if pci.read(eecp) & ((1 << 24) | (1 << 16)) == (1 << 24) {
// break;
// }
// }
//
// debug::d(" ");
// debug::dh(pci.read(eecp) as usize);
// debug::dl();
// }
// }
//
// let opbase = self.base + *CAPLENGTH as usize;
//
// let USBCMD = opbase as *mut u32;
// let USBSTS = (opbase + 4) as *mut u32;
// let USBINTR = (opbase + 8) as *mut u32;
// let FRINDEX = (opbase + 0xC) as *mut u32;
// let CTRLDSSEGMENT = (opbase + 0x10) as *mut u32;
// let PERIODICLISTBASE = (opbase + 0x14) as *mut u32;
// let ASYNCLISTADDR = (opbase + 0x18) as *mut u32;
// let CONFIGFLAG = (opbase + 0x40) as *mut u32;
// let PORTSC = (opbase + 0x44) as *mut u32;
//
// if read(USBSTS) & (1 << 12) == 0 {
// debug::d("Halting");
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
//
// write(USBCMD, read(USBCMD) & 0xFFFFFFF0);
//
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
// debug::dl();
//
// debug::d("Waiting");
// loop {
// if volatile_load(USBSTS) & (1 << 12) == (1 << 12) {
// break;
// }
// }
//
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
// debug::dl();
// }
//
// debug::d("Resetting");
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
//
// write(USBCMD, read(USBCMD) | (1 << 1));
//
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
// debug::dl();
//
// debug::d("Waiting");
// loop {
// if volatile_load(USBCMD) & (1 << 1) == 0 {
// break;
// }
// }
//
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
// debug::dl();
//
// debug::d("Enabling");
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
//
// write(USBINTR, 0b111111);
//
// write(USBCMD, read(USBCMD) | 1);
// write(CONFIGFLAG, 1);
//
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
// debug::dl();
//
// debug::d("Waiting");
// loop {
// if volatile_load(USBSTS) & (1 << 12) == 0 {
// break;
// }
// }
//
// debug::d(" CMD ");
// debug::dh(read(USBCMD) as usize);
//
// debug::d(" STS ");
// debug::dh(read(USBSTS) as usize);
// debug::dl();
//
// let disable = scheduler::start_ints();
// Duration::new(0, 100 * time::NANOS_PER_MILLI).sleep();
// scheduler::end_ints(disable);
//
// for i in 0..ports as isize {
// debug::dd(i as usize);
// debug::d(": ");
// debug::dh(read(PORTSC.offset(i)) as usize);
// debug::dl();
//
// if read(PORTSC.offset(i)) & 1 == 1 {
// debug::d("Device on port ");
// debug::dd(i as usize);
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
// debug::dl();
//
// if read(PORTSC.offset(i)) & (1 << 1) == (1 << 1) {
// debug::d("Connection Change");
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
//
// write(PORTSC.offset(i), read(PORTSC.offset(i)) | (1 << 1));
//
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
// debug::dl();
// }
//
// if read(PORTSC.offset(i)) & (1 << 2) == 0 {
// debug::d("Reset");
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
//
// write(PORTSC.offset(i), read(PORTSC.offset(i)) | (1 << 8));
//
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
//
// write(PORTSC.offset(i),
// read(PORTSC.offset(i)) & 0xFFFFFEFF);
//
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
// debug::dl();
//
// debug::d("Wait");
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
//
// loop {
// if volatile_load(PORTSC.offset(i)) & (1 << 8) == 0 {
// break;
// } else {
// volatile_store(PORTSC.offset(i),
// volatile_load(PORTSC.offset(i)) & 0xFFFFFEFF);
// }
// }
//
// debug::d(" ");
// debug::dh(read(PORTSC.offset(i)) as usize);
// debug::dl();
// }
//
// if read(PORTSC.offset(i)) & (1 << 2) == (1 << 2) {
// debug::d("Port Enabled ");
// debug::dh(read(PORTSC.offset(i)) as usize);
// debug::dl();
//
//
// let out_qtd = alloc(size_of::<Qtd>()) as *mut Qtd;
// ptr::write(out_qtd, Qtd {
// next: 1,
// next_alt: 1,
// token: (1 << 31) | (0b11 << 10) | 0x80,
// buffers: [0, 0, 0, 0, 0]
// });
//
// let in_data = alloc(64) as *mut u8;
// for i in 0..64 {
// in_data.offset(i) = 0;
// }
//
// let in_qtd = alloc(size_of::<Qtd>()) as *mut Qtd;
// ptr::write(in_qtd, Qtd {
// next: out_qtd as u32,
// next_alt: 1,
// token: (1 << 31) | (64 << 16) | (0b11 << 10) | (0b01 << 8) | 0x80,
// buffers: [in_data as u32, 0, 0, 0, 0]
// });
//
// let setup_packet = alloc(size_of::<Setup>()) as *mut Setup;
// ptr::write(setup_packet, Setup {
// request_type: 0b10000000,
// request: 6,
// value: 1 << 8,
// index: 0,
// len: 64
// });
//
// let setup_qtd = alloc(size_of::<Qtd>()) as *mut Qtd;
// ptr::write(setup_qtd, Qtd {
// next: in_qtd as u32,
// next_alt: 1,
// token: ((size_of::<Setup>() as u32) << 16) | (0b11 << 10) | (0b10 << 8) | 0x80,
// buffers: [setup_packet as u32, 0, 0, 0, 0]
// });
//
// let queuehead = alloc(size_of::<QueueHead>()) as *mut QueueHead;
// ptr::write(queuehead, QueueHead {
// next: 1,
// characteristics: (64 << 16) | (1 << 15) | (1 << 14) | (0b10 << 12),
// capabilities: (0b11 << 30),
// qtd_ptr: setup_qtd as u32,
// qtd: ptr::read(setup_qtd)
// });
//
// debug::d("Prepare");
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" PTR ");
// debug::dh(queuehead as usize);
// debug::dl();
//
// debug::d("Send");
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
//
// ASYNCLISTADDR = queuehead as u32;
//
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
//
// USBCMD |= (1 << 5);
//
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
//
// USBCMD |= 1;
//
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
// debug::dl();
//
// debug::d("Wait");
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
// debug::dl();
//
// loop {
// if *USBSTS & 0xA000 == 0 {
// break;
// }
// }
//
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
// debug::dl();
//
// debug::d("Stop");
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
//
// USBCMD &= 0xFFFFFFFF - (1 << 5);
//
// debug::d(" CMD ");
// debug::dh(*USBCMD as usize);
//
// debug::d(" STS ");
// debug::dh(*USBSTS as usize);
// debug::dl();
//
// d("Data");
// for i in 0..64 {
// debug::d(" ");
// debug::dbh(*in_data.offset(i));
// }
// debug::dl();
//
// Only detect one device for testing
// break;
// /
// } else {
// debug::d("Device not high-speed\n");
// }
// }
// }
//
}
}<|fim▁end|> | // if eecp > 0 {
// if pci.read(eecp) & ((1 << 24) | (1 << 16)) == (1 << 16) {
// debug::d("Taking Ownership");
// debug::d(" "); |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
Serializers and ModelSerializers are similar to Forms and ModelForms.
Unlike forms, they are not constrained to dealing with HTML output, and
form encoded input.
Serialization in REST framework is a two-phase process:
1. Serializers marshal between complex types like model instances, and
python primitives.
2. The process of marshalling between python primitives and request and
response content is handled by parsers and renderers.
"""
from __future__ import unicode_literals
import copy
import datetime
import inspect
import types
from decimal import Decimal
from django.contrib.contenttypes.generic import GenericForeignKey
from django.core.paginator import Page
from django.db import models
from django.forms import widgets
from django.utils.datastructures import SortedDict
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.compat import get_concrete_model, six
from rest_framework.settings import api_settings
# Note: We do the following so that users of the framework can use this style:
#
# example_field = serializers.CharField(...)
#
# This helps keep the separation between model fields, form fields, and
# serializer fields more explicit.
from rest_framework.relations import * # NOQA
from rest_framework.fields import * # NOQA
def _resolve_model(obj):
"""
Resolve supplied `obj` to a Django model class.
`obj` must be a Django model class itself, or a string
    representation of one. Useful in situations like GH #1225 where
Django may not have resolved a string-based reference to a model in
another model's foreign key definition.
String representations should have the format:
'appname.ModelName'
"""
if isinstance(obj, six.string_types) and len(obj.split('.')) == 2:
app_name, model_name = obj.split('.')
return models.get_model(app_name, model_name)
elif inspect.isclass(obj) and issubclass(obj, models.Model):
return obj
else:
raise ValueError("{0} is not a Django model".format(obj))
def pretty_name(name):
"""Converts 'first_name' to 'First name'"""
if not name:
return ''
return name.replace('_', ' ').capitalize()
class RelationsList(list):
_deleted = []
class NestedValidationError(ValidationError):
"""
The default ValidationError behavior is to stringify each item in the list
if the messages are a list of error messages.
In the case of nested serializers, where the parent has many children,
then the child's `serializer.errors` will be a list of dicts. In the case
of a single child, the `serializer.errors` will be a dict.
We need to override the default behavior to get properly nested error dicts.
"""
def __init__(self, message):
if isinstance(message, dict):
self._messages = [message]
else:
self._messages = message
@property
def messages(self):
return self._messages
class DictWithMetadata(dict):
"""
A dict-like object, that can have additional properties attached.
"""
def __getstate__(self):
"""
Used by pickle (e.g., caching).
Overridden to remove the metadata from the dict, since it shouldn't be
pickled and may in some instances be unpickleable.
"""
return dict(self)
class SortedDictWithMetadata(SortedDict):
"""
A sorted dict-like object, that can have additional properties attached.
"""
def __getstate__(self):
"""
Used by pickle (e.g., caching).
        Overridden to remove the metadata from the dict, since it shouldn't be
        pickled and may in some instances be unpickleable.
"""
return SortedDict(self).__dict__
def _is_protected_type(obj):
"""
True if the object is a native datatype that does not need to
be serialized further.
"""
return isinstance(obj, (
types.NoneType,
int, long,
datetime.datetime, datetime.date, datetime.time,
float, Decimal,
basestring)
)
def _get_declared_fields(bases, attrs):
"""
Create a list of serializer field instances from the passed in 'attrs',
plus any fields on the base classes (in 'bases').
<|fim▁hole|> if isinstance(obj, Field)]
fields.sort(key=lambda x: x[1].creation_counter)
# If this class is subclassing another Serializer, add that Serializer's
# fields. Note that we loop over the bases in *reverse*. This is necessary
# in order to maintain the correct order of fields.
for base in bases[::-1]:
if hasattr(base, 'base_fields'):
fields = list(base.base_fields.items()) + fields
return SortedDict(fields)
class SerializerMetaclass(type):
def __new__(cls, name, bases, attrs):
attrs['base_fields'] = _get_declared_fields(bases, attrs)
return super(SerializerMetaclass, cls).__new__(cls, name, bases, attrs)
class SerializerOptions(object):
"""
Meta class options for Serializer
"""
def __init__(self, meta):
self.depth = getattr(meta, 'depth', 0)
self.fields = getattr(meta, 'fields', ())
self.exclude = getattr(meta, 'exclude', ())
class BaseSerializer(WritableField):
"""
This is the Serializer implementation.
We need to implement it as `BaseSerializer` due to metaclass magicks.
"""
class Meta(object):
pass
_options_class = SerializerOptions
_dict_class = SortedDictWithMetadata
def __init__(self, instance=None, data=None, files=None,
context=None, partial=False, many=None,
allow_add_remove=False, **kwargs):
super(BaseSerializer, self).__init__(**kwargs)
self.opts = self._options_class(self.Meta)
self.parent = None
self.root = None
self.partial = partial
self.many = many
self.allow_add_remove = allow_add_remove
self.context = context or {}
self.init_data = data
self.init_files = files
self.object = instance
self.fields = self.get_fields()
self._data = None
self._files = None
self._errors = None
if many and instance is not None and not hasattr(instance, '__iter__'):
raise ValueError('instance should be a queryset or other iterable with many=True')
if allow_add_remove and not many:
raise ValueError('allow_add_remove should only be used for bulk updates, but you have not set many=True')
#####
# Methods to determine which fields to use when (de)serializing objects.
def get_default_fields(self):
"""
Return the complete set of default fields for the object, as a dict.
"""
return {}
def get_fields(self):
"""
Returns the complete set of fields for the object as a dict.
This will be the set of any explicitly declared fields,
plus the set of fields returned by get_default_fields().
"""
ret = SortedDict()
# Get the explicitly declared fields
base_fields = copy.deepcopy(self.base_fields)
for key, field in base_fields.items():
ret[key] = field
# Add in the default fields
default_fields = self.get_default_fields()
for key, val in default_fields.items():
if key not in ret:
ret[key] = val
# If 'fields' is specified, use those fields, in that order.
if self.opts.fields:
assert isinstance(self.opts.fields, (list, tuple)), '`fields` must be a list or tuple'
new = SortedDict()
for key in self.opts.fields:
new[key] = ret[key]
ret = new
# Remove anything in 'exclude'
if self.opts.exclude:
assert isinstance(self.opts.exclude, (list, tuple)), '`exclude` must be a list or tuple'
for key in self.opts.exclude:
ret.pop(key, None)
for key, field in ret.items():
field.initialize(parent=self, field_name=key)
return ret
#####
# Methods to convert or revert from objects <--> primitive representations.
def get_field_key(self, field_name):
"""
Return the key that should be used for a given field.
"""
return field_name
def restore_fields(self, data, files):
"""
Core of deserialization, together with `restore_object`.
Converts a dictionary of data into a dictionary of deserialized fields.
"""
reverted_data = {}
if data is not None and not isinstance(data, dict):
self._errors['non_field_errors'] = ['Invalid data']
return None
for field_name, field in self.fields.items():
field.initialize(parent=self, field_name=field_name)
try:
field.field_from_native(data, files, field_name, reverted_data)
except ValidationError as err:
self._errors[field_name] = list(err.messages)
return reverted_data
def perform_validation(self, attrs):
"""
Run `validate_<fieldname>()` and `validate()` methods on the serializer
"""
for field_name, field in self.fields.items():
if field_name in self._errors:
continue
source = field.source or field_name
if self.partial and source not in attrs:
continue
try:
validate_method = getattr(self, 'validate_%s' % field_name, None)
if validate_method:
attrs = validate_method(attrs, source)
except ValidationError as err:
self._errors[field_name] = self._errors.get(field_name, []) + list(err.messages)
# If there are already errors, we don't run .validate() because
        # field-validation failed and thus `attrs` may not be complete,
        # which in turn can cause inconsistent validation errors.
if not self._errors:
try:
attrs = self.validate(attrs)
except ValidationError as err:
if hasattr(err, 'message_dict'):
for field_name, error_messages in err.message_dict.items():
self._errors[field_name] = self._errors.get(field_name, []) + list(error_messages)
elif hasattr(err, 'messages'):
self._errors['non_field_errors'] = err.messages
return attrs
def validate(self, attrs):
"""
Stub method, to be overridden in Serializer subclasses
"""
return attrs
def restore_object(self, attrs, instance=None):
"""
Deserialize a dictionary of attributes into an object instance.
You should override this method to control how deserialized objects
are instantiated.
"""
if instance is not None:
instance.update(attrs)
return instance
return attrs
def to_native(self, obj):
"""
Serialize objects -> primitives.
"""
ret = self._dict_class()
ret.fields = self._dict_class()
for field_name, field in self.fields.items():
if field.read_only and obj is None:
continue
field.initialize(parent=self, field_name=field_name)
key = self.get_field_key(field_name)
value = field.field_to_native(obj, field_name)
method = getattr(self, 'transform_%s' % field_name, None)
if callable(method):
value = method(obj, value)
if not getattr(field, 'write_only', False):
ret[key] = value
ret.fields[key] = self.augment_field(field, field_name, key, value)
return ret
def from_native(self, data, files=None):
"""
Deserialize primitives -> objects.
"""
self._errors = {}
if data is not None or files is not None:
attrs = self.restore_fields(data, files)
if attrs is not None:
attrs = self.perform_validation(attrs)
else:
self._errors['non_field_errors'] = ['No input provided']
if not self._errors:
return self.restore_object(attrs, instance=getattr(self, 'object', None))
def augment_field(self, field, field_name, key, value):
# This horrible stuff is to manage serializers rendering to HTML
field._errors = self._errors.get(key) if self._errors else None
field._name = field_name
field._value = self.init_data.get(key) if self._errors and self.init_data else value
if not field.label:
field.label = pretty_name(key)
return field
def field_to_native(self, obj, field_name):
"""
Override default so that the serializer can be used as a nested field
across relationships.
"""
if self.write_only:
return None
if self.source == '*':
return self.to_native(obj)
# Get the raw field value
try:
source = self.source or field_name
value = obj
for component in source.split('.'):
if value is None:
break
value = get_component(value, component)
except ObjectDoesNotExist:
return None
if is_simple_callable(getattr(value, 'all', None)):
return [self.to_native(item) for item in value.all()]
if value is None:
return None
if self.many is not None:
many = self.many
else:
many = hasattr(value, '__iter__') and not isinstance(value, (Page, dict, six.text_type))
if many:
return [self.to_native(item) for item in value]
return self.to_native(value)
def field_from_native(self, data, files, field_name, into):
"""
Override default so that the serializer can be used as a writable
nested field across relationships.
"""
if self.read_only:
return
try:
value = data[field_name]
except KeyError:
if self.default is not None and not self.partial:
# Note: partial updates shouldn't set defaults
value = copy.deepcopy(self.default)
else:
if self.required:
raise ValidationError(self.error_messages['required'])
return
if self.source == '*':
if value:
reverted_data = self.restore_fields(value, {})
if not self._errors:
into.update(reverted_data)
else:
if value in (None, ''):
into[(self.source or field_name)] = None
else:
# Set the serializer object if it exists
obj = get_component(self.parent.object, self.source or field_name) if self.parent.object else None
# If we have a model manager or similar object then we need
# to iterate through each instance.
if (self.many and
not hasattr(obj, '__iter__') and
is_simple_callable(getattr(obj, 'all', None))):
obj = obj.all()
kwargs = {
'instance': obj,
'data': value,
'context': self.context,
'partial': self.partial,
'many': self.many,
'allow_add_remove': self.allow_add_remove
}
serializer = self.__class__(**kwargs)
if serializer.is_valid():
into[self.source or field_name] = serializer.object
else:
# Propagate errors up to our parent
raise NestedValidationError(serializer.errors)
def get_identity(self, data):
"""
This hook is required for bulk update.
It is used to determine the canonical identity of a given object.
Note that the data has not been validated at this point, so we need
to make sure that we catch any cases of incorrect datatypes being
passed to this method.
"""
try:
return data.get('id', None)
except AttributeError:
return None
@property
def errors(self):
"""
Run deserialization and return error data,
setting self.object if no errors occurred.
"""
if self._errors is None:
data, files = self.init_data, self.init_files
if self.many is not None:
many = self.many
else:
many = hasattr(data, '__iter__') and not isinstance(data, (Page, dict, six.text_type))
if many:
warnings.warn('Implicit list/queryset serialization is deprecated. '
'Use the `many=True` flag when instantiating the serializer.',
DeprecationWarning, stacklevel=3)
if many:
ret = RelationsList()
errors = []
update = self.object is not None
if update:
# If this is a bulk update we need to map all the objects
# to a canonical identity so we can determine which
# individual object is being updated for each item in the
# incoming data
objects = self.object
identities = [self.get_identity(self.to_native(obj)) for obj in objects]
identity_to_objects = dict(zip(identities, objects))
if hasattr(data, '__iter__') and not isinstance(data, (dict, six.text_type)):
for item in data:
if update:
# Determine which object we're updating
identity = self.get_identity(item)
self.object = identity_to_objects.pop(identity, None)
if self.object is None and not self.allow_add_remove:
ret.append(None)
errors.append({'non_field_errors': ['Cannot create a new item, only existing items may be updated.']})
continue
ret.append(self.from_native(item, None))
errors.append(self._errors)
if update and self.allow_add_remove:
ret._deleted = identity_to_objects.values()
self._errors = any(errors) and errors or []
else:
self._errors = {'non_field_errors': ['Expected a list of items.']}
else:
ret = self.from_native(data, files)
if not self._errors:
self.object = ret
return self._errors
def is_valid(self):
return not self.errors
@property
def data(self):
"""
Returns the serialized data on the serializer.
"""
if self._data is None:
obj = self.object
if self.many is not None:
many = self.many
else:
many = hasattr(obj, '__iter__') and not isinstance(obj, (Page, dict))
if many:
warnings.warn('Implicit list/queryset serialization is deprecated. '
'Use the `many=True` flag when instantiating the serializer.',
DeprecationWarning, stacklevel=2)
if many:
self._data = [self.to_native(item) for item in obj]
else:
self._data = self.to_native(obj)
return self._data
def save_object(self, obj, **kwargs):
obj.save(**kwargs)
def delete_object(self, obj):
obj.delete()
def save(self, **kwargs):
"""
Save the deserialized object and return it.
"""
# Clear cached _data, which may be invalidated by `save()`
self._data = None
if isinstance(self.object, list):
[self.save_object(item, **kwargs) for item in self.object]
if self.object._deleted:
[self.delete_object(item) for item in self.object._deleted]
else:
self.save_object(self.object, **kwargs)
return self.object
def metadata(self):
"""
Return a dictionary of metadata about the fields on the serializer.
Useful for things like responding to OPTIONS requests, or generating
API schemas for auto-documentation.
"""
return SortedDict(
[(field_name, field.metadata())
for field_name, field in six.iteritems(self.fields)]
)
class Serializer(six.with_metaclass(SerializerMetaclass, BaseSerializer)):
pass
class ModelSerializerOptions(SerializerOptions):
"""
Meta class options for ModelSerializer
"""
def __init__(self, meta):
super(ModelSerializerOptions, self).__init__(meta)
self.model = getattr(meta, 'model', None)
self.read_only_fields = getattr(meta, 'read_only_fields', ())
self.write_only_fields = getattr(meta, 'write_only_fields', ())
class ModelSerializer(Serializer):
"""
A serializer that deals with model instances and querysets.
"""
_options_class = ModelSerializerOptions
field_mapping = {
models.AutoField: IntegerField,
models.FloatField: FloatField,
models.IntegerField: IntegerField,
models.PositiveIntegerField: IntegerField,
models.SmallIntegerField: IntegerField,
models.PositiveSmallIntegerField: IntegerField,
models.DateTimeField: DateTimeField,
models.DateField: DateField,
models.TimeField: TimeField,
models.DecimalField: DecimalField,
models.EmailField: EmailField,
models.CharField: CharField,
models.URLField: URLField,
models.SlugField: SlugField,
models.TextField: CharField,
models.CommaSeparatedIntegerField: CharField,
models.BooleanField: BooleanField,
models.NullBooleanField: BooleanField,
models.FileField: FileField,
models.ImageField: ImageField,
}
def get_default_fields(self):
"""
Return all the fields that should be serialized for the model.
"""
cls = self.opts.model
assert cls is not None, \
"Serializer class '%s' is missing 'model' Meta option" % self.__class__.__name__
opts = get_concrete_model(cls)._meta
ret = SortedDict()
nested = bool(self.opts.depth)
# Deal with adding the primary key field
pk_field = opts.pk
while pk_field.rel and pk_field.rel.parent_link:
# If model is a child via multitable inheritance, use parent's pk
pk_field = pk_field.rel.to._meta.pk
field = self.get_pk_field(pk_field)
if field:
ret[pk_field.name] = field
# Deal with forward relationships
forward_rels = [field for field in opts.fields if field.serialize]
forward_rels += [field for field in opts.many_to_many if field.serialize]
for model_field in forward_rels:
has_through_model = False
if model_field.rel:
to_many = isinstance(model_field,
models.fields.related.ManyToManyField)
related_model = _resolve_model(model_field.rel.to)
if to_many and not model_field.rel.through._meta.auto_created:
has_through_model = True
if model_field.rel and nested:
if len(inspect.getargspec(self.get_nested_field).args) == 2:
warnings.warn(
'The `get_nested_field(model_field)` call signature '
'is due to be deprecated. '
'Use `get_nested_field(model_field, related_model, '
'to_many) instead',
PendingDeprecationWarning
)
field = self.get_nested_field(model_field)
else:
field = self.get_nested_field(model_field, related_model, to_many)
elif model_field.rel:
                if len(inspect.getargspec(self.get_related_field).args) == 3:
warnings.warn(
'The `get_related_field(model_field, to_many)` call '
'signature is due to be deprecated. '
'Use `get_related_field(model_field, related_model, '
'to_many) instead',
PendingDeprecationWarning
)
field = self.get_related_field(model_field, to_many=to_many)
else:
field = self.get_related_field(model_field, related_model, to_many)
else:
field = self.get_field(model_field)
if field:
if has_through_model:
field.read_only = True
ret[model_field.name] = field
# Deal with reverse relationships
if not self.opts.fields:
reverse_rels = []
else:
# Reverse relationships are only included if they are explicitly
# present in the `fields` option on the serializer
reverse_rels = opts.get_all_related_objects()
reverse_rels += opts.get_all_related_many_to_many_objects()
for relation in reverse_rels:
accessor_name = relation.get_accessor_name()
if not self.opts.fields or accessor_name not in self.opts.fields:
continue
related_model = relation.model
to_many = relation.field.rel.multiple
has_through_model = False
is_m2m = isinstance(relation.field,
models.fields.related.ManyToManyField)
if (is_m2m and
hasattr(relation.field.rel, 'through') and
not relation.field.rel.through._meta.auto_created):
has_through_model = True
if nested:
field = self.get_nested_field(None, related_model, to_many)
else:
field = self.get_related_field(None, related_model, to_many)
if field:
if has_through_model:
field.read_only = True
ret[accessor_name] = field
# Ensure that 'read_only_fields' is an iterable
assert isinstance(self.opts.read_only_fields, (list, tuple)), '`read_only_fields` must be a list or tuple'
# Add the `read_only` flag to any fields that have been specified
# in the `read_only_fields` option
for field_name in self.opts.read_only_fields:
assert field_name not in self.base_fields.keys(), (
"field '%s' on serializer '%s' specified in "
"`read_only_fields`, but also added "
"as an explicit field. Remove it from `read_only_fields`." %
(field_name, self.__class__.__name__))
assert field_name in ret, (
"Non-existant field '%s' specified in `read_only_fields` "
"on serializer '%s'." %
(field_name, self.__class__.__name__))
ret[field_name].read_only = True
# Ensure that 'write_only_fields' is an iterable
assert isinstance(self.opts.write_only_fields, (list, tuple)), '`write_only_fields` must be a list or tuple'
for field_name in self.opts.write_only_fields:
assert field_name not in self.base_fields.keys(), (
"field '%s' on serializer '%s' specified in "
"`write_only_fields`, but also added "
"as an explicit field. Remove it from `write_only_fields`." %
(field_name, self.__class__.__name__))
assert field_name in ret, (
"Non-existant field '%s' specified in `write_only_fields` "
"on serializer '%s'." %
(field_name, self.__class__.__name__))
ret[field_name].write_only = True
return ret
def get_pk_field(self, model_field):
"""
Returns a default instance of the pk field.
"""
return self.get_field(model_field)
def get_nested_field(self, model_field, related_model, to_many):
"""
Creates a default instance of a nested relational field.
Note that model_field will be `None` for reverse relationships.
"""
class NestedModelSerializer(ModelSerializer):
class Meta:
model = related_model
depth = self.opts.depth - 1
return NestedModelSerializer(many=to_many)
def get_related_field(self, model_field, related_model, to_many):
"""
Creates a default instance of a flat relational field.
Note that model_field will be `None` for reverse relationships.
"""
# TODO: filter queryset using:
# .using(db).complex_filter(self.rel.limit_choices_to)
kwargs = {
'queryset': related_model._default_manager,
'many': to_many
}
if model_field:
kwargs['required'] = not(model_field.null or model_field.blank)
if model_field.help_text is not None:
kwargs['help_text'] = model_field.help_text
if model_field.verbose_name is not None:
kwargs['label'] = model_field.verbose_name
            if not model_field.editable:
                kwargs['read_only'] = True
return PrimaryKeyRelatedField(**kwargs)
def get_field(self, model_field):
"""
Creates a default instance of a basic non-relational field.
"""
kwargs = {}
if model_field.null or model_field.blank:
kwargs['required'] = False
if isinstance(model_field, models.AutoField) or not model_field.editable:
kwargs['read_only'] = True
if model_field.has_default():
kwargs['default'] = model_field.get_default()
if issubclass(model_field.__class__, models.TextField):
kwargs['widget'] = widgets.Textarea
if model_field.verbose_name is not None:
kwargs['label'] = model_field.verbose_name
if model_field.help_text is not None:
kwargs['help_text'] = model_field.help_text
# TODO: TypedChoiceField?
if model_field.flatchoices: # This ModelField contains choices
kwargs['choices'] = model_field.flatchoices
if model_field.null:
kwargs['empty'] = None
return ChoiceField(**kwargs)
# put this below the ChoiceField because min_value isn't a valid initializer
if issubclass(model_field.__class__, models.PositiveIntegerField) or\
issubclass(model_field.__class__, models.PositiveSmallIntegerField):
kwargs['min_value'] = 0
attribute_dict = {
models.CharField: ['max_length'],
models.CommaSeparatedIntegerField: ['max_length'],
models.DecimalField: ['max_digits', 'decimal_places'],
models.EmailField: ['max_length'],
models.FileField: ['max_length'],
models.ImageField: ['max_length'],
models.SlugField: ['max_length'],
models.URLField: ['max_length'],
}
if model_field.__class__ in attribute_dict:
attributes = attribute_dict[model_field.__class__]
for attribute in attributes:
kwargs.update({attribute: getattr(model_field, attribute)})
try:
return self.field_mapping[model_field.__class__](**kwargs)
except KeyError:
return ModelField(model_field=model_field, **kwargs)
def get_validation_exclusions(self, instance=None):
"""
Return a list of field names to exclude from model validation.
"""
cls = self.opts.model
opts = get_concrete_model(cls)._meta
exclusions = [field.name for field in opts.fields + opts.many_to_many]
for field_name, field in self.fields.items():
field_name = field.source or field_name
if field_name in exclusions \
and not field.read_only \
and (field.required or hasattr(instance, field_name)) \
and not isinstance(field, Serializer):
exclusions.remove(field_name)
return exclusions
def full_clean(self, instance):
"""
Perform Django's full_clean, and populate the `errors` dictionary
if any validation errors occur.
Note that we don't perform this inside the `.restore_object()` method,
so that subclasses can override `.restore_object()`, and still get
the full_clean validation checking.
"""
try:
instance.full_clean(exclude=self.get_validation_exclusions(instance))
except ValidationError as err:
self._errors = err.message_dict
return None
return instance
def restore_object(self, attrs, instance=None):
"""
Restore the model instance.
"""
m2m_data = {}
related_data = {}
nested_forward_relations = {}
meta = self.opts.model._meta
# Reverse fk or one-to-one relations
for (obj, model) in meta.get_all_related_objects_with_model():
field_name = obj.get_accessor_name()
if field_name in attrs:
related_data[field_name] = attrs.pop(field_name)
# Reverse m2m relations
for (obj, model) in meta.get_all_related_m2m_objects_with_model():
field_name = obj.get_accessor_name()
if field_name in attrs:
m2m_data[field_name] = attrs.pop(field_name)
# Forward m2m relations
for field in meta.many_to_many + meta.virtual_fields:
if isinstance(field, GenericForeignKey):
continue
if field.name in attrs:
m2m_data[field.name] = attrs.pop(field.name)
# Nested forward relations - These need to be marked so we can save
# them before saving the parent model instance.
for field_name in attrs.keys():
if isinstance(self.fields.get(field_name, None), Serializer):
nested_forward_relations[field_name] = attrs[field_name]
# Create an empty instance of the model
if instance is None:
instance = self.opts.model()
for key, val in attrs.items():
try:
setattr(instance, key, val)
except ValueError:
self._errors[key] = self.error_messages['required']
# Any relations that cannot be set until we've
# saved the model get hidden away on these
# private attributes, so we can deal with them
# at the point of save.
instance._related_data = related_data
instance._m2m_data = m2m_data
instance._nested_forward_relations = nested_forward_relations
return instance
def from_native(self, data, files):
"""
Override the default method to also include model field validation.
"""
instance = super(ModelSerializer, self).from_native(data, files)
if not self._errors:
return self.full_clean(instance)
def save_object(self, obj, **kwargs):
"""
Save the deserialized object.
"""
if getattr(obj, '_nested_forward_relations', None):
# Nested relationships need to be saved before we can save the
# parent instance.
for field_name, sub_object in obj._nested_forward_relations.items():
if sub_object:
self.save_object(sub_object)
setattr(obj, field_name, sub_object)
obj.save(**kwargs)
if getattr(obj, '_m2m_data', None):
for accessor_name, object_list in obj._m2m_data.items():
setattr(obj, accessor_name, object_list)
del(obj._m2m_data)
if getattr(obj, '_related_data', None):
related_fields = dict([
(field.get_accessor_name(), field)
for field, model
in obj._meta.get_all_related_objects_with_model()
])
for accessor_name, related in obj._related_data.items():
if isinstance(related, RelationsList):
# Nested reverse fk relationship
for related_item in related:
fk_field = related_fields[accessor_name].field.name
setattr(related_item, fk_field, obj)
self.save_object(related_item)
# Delete any removed objects
if related._deleted:
[self.delete_object(item) for item in related._deleted]
elif isinstance(related, models.Model):
# Nested reverse one-one relationship
fk_field = obj._meta.get_field_by_name(accessor_name)[0].field.name
setattr(related, fk_field, obj)
self.save_object(related)
else:
# Reverse FK or reverse one-one
setattr(obj, accessor_name, related)
del(obj._related_data)
class HyperlinkedModelSerializerOptions(ModelSerializerOptions):
"""
Options for HyperlinkedModelSerializer
"""
def __init__(self, meta):
super(HyperlinkedModelSerializerOptions, self).__init__(meta)
self.view_name = getattr(meta, 'view_name', None)
self.lookup_field = getattr(meta, 'lookup_field', None)
self.url_field_name = getattr(meta, 'url_field_name', api_settings.URL_FIELD_NAME)
class HyperlinkedModelSerializer(ModelSerializer):
"""
A subclass of ModelSerializer that uses hyperlinked relationships,
instead of primary key relationships.
"""
_options_class = HyperlinkedModelSerializerOptions
_default_view_name = '%(model_name)s-detail'
_hyperlink_field_class = HyperlinkedRelatedField
_hyperlink_identify_field_class = HyperlinkedIdentityField
def get_default_fields(self):
fields = super(HyperlinkedModelSerializer, self).get_default_fields()
if self.opts.view_name is None:
self.opts.view_name = self._get_default_view_name(self.opts.model)
if self.opts.url_field_name not in fields:
url_field = self._hyperlink_identify_field_class(
view_name=self.opts.view_name,
lookup_field=self.opts.lookup_field
)
ret = self._dict_class()
ret[self.opts.url_field_name] = url_field
ret.update(fields)
fields = ret
return fields
def get_pk_field(self, model_field):
if self.opts.fields and model_field.name in self.opts.fields:
return self.get_field(model_field)
def get_related_field(self, model_field, related_model, to_many):
"""
Creates a default instance of a flat relational field.
"""
# TODO: filter queryset using:
# .using(db).complex_filter(self.rel.limit_choices_to)
kwargs = {
'queryset': related_model._default_manager,
'view_name': self._get_default_view_name(related_model),
'many': to_many
}
if model_field:
kwargs['required'] = not(model_field.null or model_field.blank)
if model_field.help_text is not None:
kwargs['help_text'] = model_field.help_text
if model_field.verbose_name is not None:
kwargs['label'] = model_field.verbose_name
if self.opts.lookup_field:
kwargs['lookup_field'] = self.opts.lookup_field
return self._hyperlink_field_class(**kwargs)
def get_identity(self, data):
"""
This hook is required for bulk update.
We need to override the default, to use the url as the identity.
"""
try:
return data.get(self.opts.url_field_name, None)
except AttributeError:
return None
def _get_default_view_name(self, model):
"""
Return the view name to use if 'view_name' is not specified in 'Meta'
"""
model_meta = model._meta
format_kwargs = {
'app_label': model_meta.app_label,
'model_name': model_meta.object_name.lower()
}
return self._default_view_name % format_kwargs<|fim▁end|> | Note that all fields from the base classes are used.
"""
fields = [(field_name, attrs.pop(field_name))
for field_name, obj in list(six.iteritems(attrs)) |
<|file_name|>ModifiedTanimotoDistanceConfig.java<|end_file_name|><|fim▁begin|>package nl.esciencecenter.e3dchem.modifiedtanimoto;
import org.knime.base.util.flowvariable.FlowVariableProvider;
import org.knime.core.data.DataTableSpec;
import org.knime.core.node.InvalidSettingsException;
import org.knime.core.node.util.CheckUtils;
import org.knime.distance.category.DistanceCategoryConfig;
import org.knime.distance.util.propertyresolver.Configuration;
import org.knime.distance.util.propertyresolver.Property;
@Configuration
public final class ModifiedTanimotoDistanceConfig extends
DistanceCategoryConfig<ModifiedTanimotoDistance> {
@Property("meanBitDensity")
private double meanBitDensity = 0.01;
/**
* Framework constructor.
*/
ModifiedTanimotoDistanceConfig() {
}
public ModifiedTanimotoDistanceConfig(final double meanBitDensity,<|fim▁hole|> super(column);
this.meanBitDensity = meanBitDensity;
CheckUtils.checkSetting(meanBitDensity >= 0,
"mean bit density is not positive: %f ", meanBitDensity);
}
@Override
protected DistanceCategoryConfig<?> clone(String... columns)
throws InvalidSettingsException {
CheckUtils.checkSetting(columns != null && columns.length == 1,
"Exactly one column must be selected.");
return new ModifiedTanimotoDistanceConfig(meanBitDensity, columns[0]);
}
@Override
public String getFactoryId() {
return ModifiedTanimotoDistanceFactory.ID;
}
/**
* {@inheritDoc}
*/
@Override
public ModifiedTanimotoDistance createDistanceMeasure(DataTableSpec spec,
FlowVariableProvider flowVariableProvider)
throws InvalidSettingsException {
return new ModifiedTanimotoDistance(this, spec);
}
public double getMeanBitDensity() {
return meanBitDensity;
}
public void setMeanBitDensity(final double meanBitDensity) {
this.meanBitDensity = meanBitDensity;
}
}<|fim▁end|> | String column) throws InvalidSettingsException { |
<|file_name|>20_58ee75910929_add_theme_to_config_.py<|end_file_name|><|fim▁begin|>"""Add theme to config
Revision ID: 58ee75910929
Revises: 1c22ceb384a7
Create Date: 2015-08-28 15:15:47.971807
"""
# revision identifiers, used by Alembic.
revision = '58ee75910929'
down_revision = '1c22ceb384a7'
from alembic import op
import sqlalchemy as sa
<|fim▁hole|>
def downgrade():
op.execute("DELETE FROM config WHERE category='general' AND key='theme'")<|fim▁end|> |
def upgrade():
op.execute("INSERT INTO config (category, key, value, description) VALUES ('general', 'theme', '\"zkpylons\"', 'The enabled theme to use. Should match the theme folder name (requires a server restart to take effect)')") |
<|file_name|>descriptor_test.rs<|end_file_name|><|fim▁begin|>use super::*;
#[test]
fn merge_aliases_empty() {
let base = IndexMap::new();
let extended = IndexMap::new();
let output = merge_aliases(&base, &extended);
assert!(output.is_empty());
}
#[test]
fn merge_aliases_base_only() {
let mut base = IndexMap::new();
base.insert("old".to_string(), "new".to_string());
let extended = IndexMap::new();
let output = merge_aliases(&base, &extended);
assert_eq!(output.len(), 1);
assert_eq!(output.get("old").unwrap(), "new");
}
#[test]
fn merge_aliases_extended_only() {
let base = IndexMap::new();
let mut extended = IndexMap::new();
extended.insert("old".to_string(), "new".to_string());
let output = merge_aliases(&base, &extended);
assert_eq!(output.len(), 1);
assert_eq!(output.get("old").unwrap(), "new");
}
#[test]
fn merge_aliases_both_and_duplicates() {
let mut base = IndexMap::new();
base.insert("base".to_string(), "base2".to_string());
base.insert("test".to_string(), "base".to_string());
let mut extended = IndexMap::new();
extended.insert("extended".to_string(), "extended2".to_string());
extended.insert("test".to_string(), "extended".to_string());
let output = merge_aliases(&base, &extended);
assert_eq!(output.len(), 3);
assert_eq!(output.get("base").unwrap(), "base2");
assert_eq!(output.get("extended").unwrap(), "extended2");
assert_eq!(output.get("test").unwrap(), "extended");
}
#[test]
fn merge_plugins_map_empty() {
let base = IndexMap::new();
let extended = IndexMap::new();
let output = merge_plugins_map(&base, &extended);
assert!(output.is_empty());
}
#[test]
fn merge_plugins_map_base_only() {
let mut base = IndexMap::new();
base.insert(
"plugin".to_string(),
Plugin {
script: "test".to_string(),
},
);
let extended = IndexMap::new();
let output = merge_plugins_map(&base, &extended);
assert_eq!(output.len(), 1);
assert_eq!(output.get("plugin").unwrap().script, "test");
}
#[test]
fn merge_plugins_map_extended_only() {
let base = IndexMap::new();
let mut extended = IndexMap::new();
extended.insert(
"plugin".to_string(),
Plugin {
script: "test".to_string(),
},
);
let output = merge_plugins_map(&base, &extended);
assert_eq!(output.len(), 1);
assert_eq!(output.get("plugin").unwrap().script, "test");
}
#[test]
fn merge_plugins_map_both_and_duplicates() {
let mut base = IndexMap::new();
base.insert(
"base".to_string(),
Plugin {
script: "base".to_string(),
},
);
base.insert(
"test".to_string(),
Plugin {
script: "test1".to_string(),
},
);
let mut extended = IndexMap::new();
extended.insert(
"extended".to_string(),
Plugin {
script: "extended".to_string(),
},
);
extended.insert(
"test".to_string(),
Plugin {
script: "test2".to_string(),
},
);
let output = merge_plugins_map(&base, &extended);
assert_eq!(output.len(), 3);
assert_eq!(output.get("base").unwrap().script, "base");
assert_eq!(output.get("extended").unwrap().script, "extended");
assert_eq!(output.get("test").unwrap().script, "test2");
}
#[test]
fn merge_plugins_config_impl_aliases_none() {
let base = Plugins::new();
let extended = Plugins::new();
let output = merge_plugins_config_impl(base, extended);
assert!(output.aliases.is_none());
assert!(output.plugins.is_empty());
}
<|fim▁hole|> let mut aliases = IndexMap::new();
aliases.insert("old".to_string(), "new".to_string());
base.aliases = Some(aliases);
let extended = Plugins::new();
let output = merge_plugins_config_impl(base, extended);
assert!(output.aliases.is_some());
assert_eq!(output.aliases.unwrap().get("old").unwrap(), "new");
assert!(output.plugins.is_empty());
}
#[test]
fn merge_plugins_config_impl_base_aliases_none() {
let base = Plugins::new();
let mut extended = Plugins::new();
let mut aliases = IndexMap::new();
aliases.insert("old".to_string(), "new".to_string());
extended.aliases = Some(aliases);
let output = merge_plugins_config_impl(base, extended);
assert!(output.aliases.is_some());
assert_eq!(output.aliases.unwrap().get("old").unwrap(), "new");
assert!(output.plugins.is_empty());
}
#[test]
fn merge_plugins_config_impl_merge_all() {
let mut base = Plugins::new();
let mut extended = Plugins::new();
let mut aliases = IndexMap::new();
aliases.insert("base".to_string(), "basenew".to_string());
aliases.insert("test".to_string(), "base".to_string());
base.aliases = Some(aliases);
aliases = IndexMap::new();
aliases.insert("extended".to_string(), "extendednew".to_string());
aliases.insert("test".to_string(), "extended".to_string());
extended.aliases = Some(aliases);
let mut plugins = IndexMap::new();
plugins.insert(
"base".to_string(),
Plugin {
script: "base".to_string(),
},
);
plugins.insert(
"test".to_string(),
Plugin {
script: "test1".to_string(),
},
);
base.plugins = plugins;
plugins = IndexMap::new();
plugins.insert(
"extended".to_string(),
Plugin {
script: "extended".to_string(),
},
);
plugins.insert(
"test".to_string(),
Plugin {
script: "test2".to_string(),
},
);
extended.plugins = plugins;
let output = merge_plugins_config_impl(base, extended);
aliases = output.aliases.unwrap();
assert_eq!(aliases.len(), 3);
assert_eq!(aliases.get("base").unwrap(), "basenew");
assert_eq!(aliases.get("extended").unwrap(), "extendednew");
assert_eq!(aliases.get("test").unwrap(), "extended");
assert_eq!(output.plugins.len(), 3);
assert_eq!(output.plugins.get("base").unwrap().script, "base");
assert_eq!(output.plugins.get("extended").unwrap().script, "extended");
assert_eq!(output.plugins.get("test").unwrap().script, "test2");
}
#[test]
fn merge_plugins_config_none() {
let output = merge_plugins_config(None, None);
assert!(output.is_none());
}
#[test]
fn merge_plugins_config_extended_none() {
let mut base = Plugins::new();
let mut aliases = IndexMap::new();
aliases.insert("test".to_string(), "test".to_string());
base.aliases = Some(aliases);
let mut plugins = IndexMap::new();
plugins.insert(
"test".to_string(),
Plugin {
script: "test1".to_string(),
},
);
base.plugins = plugins;
let output = merge_plugins_config(Some(base), None);
assert!(output.is_some());
let plugins_wrapper = output.unwrap();
aliases = plugins_wrapper.aliases.unwrap();
assert_eq!(aliases.len(), 1);
assert_eq!(aliases.get("test").unwrap(), "test");
assert_eq!(plugins_wrapper.plugins.len(), 1);
assert_eq!(plugins_wrapper.plugins.get("test").unwrap().script, "test1");
}
#[test]
fn merge_plugins_config_base_none() {
let mut extended = Plugins::new();
let mut aliases = IndexMap::new();
aliases.insert("test".to_string(), "test".to_string());
extended.aliases = Some(aliases);
let mut plugins = IndexMap::new();
plugins.insert(
"test".to_string(),
Plugin {
script: "test1".to_string(),
},
);
extended.plugins = plugins;
let output = merge_plugins_config(None, Some(extended));
assert!(output.is_some());
let plugins_wrapper = output.unwrap();
aliases = plugins_wrapper.aliases.unwrap();
assert_eq!(aliases.len(), 1);
assert_eq!(aliases.get("test").unwrap(), "test");
assert_eq!(plugins_wrapper.plugins.len(), 1);
assert_eq!(plugins_wrapper.plugins.get("test").unwrap().script, "test1");
}
#[test]
fn merge_plugins_config_both_provided() {
let mut base = Plugins::new();
let mut extended = Plugins::new();
let mut aliases = IndexMap::new();
aliases.insert("base".to_string(), "1".to_string());
aliases.insert("test".to_string(), "1".to_string());
base.aliases = Some(aliases);
aliases = IndexMap::new();
aliases.insert("extended".to_string(), "1".to_string());
aliases.insert("test".to_string(), "2".to_string());
extended.aliases = Some(aliases);
let mut plugins = IndexMap::new();
plugins.insert(
"base".to_string(),
Plugin {
script: "1".to_string(),
},
);
plugins.insert(
"test".to_string(),
Plugin {
script: "1".to_string(),
},
);
base.plugins = plugins;
plugins = IndexMap::new();
plugins.insert(
"extended".to_string(),
Plugin {
script: "1".to_string(),
},
);
plugins.insert(
"test".to_string(),
Plugin {
script: "2".to_string(),
},
);
extended.plugins = plugins;
let output = merge_plugins_config(Some(base), Some(extended));
assert!(output.is_some());
let plugins_wrapper = output.unwrap();
aliases = plugins_wrapper.aliases.unwrap();
assert_eq!(aliases.len(), 3);
assert_eq!(aliases.get("base").unwrap(), "1");
assert_eq!(aliases.get("extended").unwrap(), "1");
assert_eq!(aliases.get("test").unwrap(), "2");
assert_eq!(plugins_wrapper.plugins.len(), 3);
assert_eq!(plugins_wrapper.plugins.get("base").unwrap().script, "1");
assert_eq!(plugins_wrapper.plugins.get("extended").unwrap().script, "1");
assert_eq!(plugins_wrapper.plugins.get("test").unwrap().script, "2");
}<|fim▁end|> | #[test]
fn merge_plugins_config_impl_extended_aliases_none() {
let mut base = Plugins::new(); |
<|file_name|>NetherLapis.java<|end_file_name|><|fim▁begin|>package com.camp.block;
import java.util.Random;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import com.camp.creativetabs.CreativeTabsManager;
import com.camp.item.ItemManager;
//import com.bedrockminer.tutorial.Main;
import com.camp.lib.StringLibrary;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class NetherLapis extends Block {
//"LapisItem" = new ItemStack(Items.dye, 1, 4);
private ItemStack drop;
private int meta;
private int least_quantity;
private int most_quantity;
//("gemLapis", new ItemStack(Items.dye, 1, 4)
protected NetherLapis(String unlocalizedName, Material mat, ItemStack gemLapisGem2, int meta, int least_quantity, int most_quantity) {
super(mat);
this.drop = gemLapisGem2;
this.meta = meta;
this.least_quantity = least_quantity;
this.most_quantity = most_quantity;
this.setLightLevel(0.0F);
this.setBlockName(unlocalizedName);
this.setBlockTextureName(StringLibrary.MODID + ":" + "nether_lapis");
this.setCreativeTab(CreativeTabsManager.tabBlock);
}
//ItemStack gemLapis = new ItemStack(Items.dye,1);
///ItemStack gemLapis = new ItemStack(Items.dye, 1, 4); setItemDamage(4);
ItemStack gemLapisGem = new ItemStack(Items.dye, 1, 4);
protected NetherLapis(String unlocalizedName, Material mat, ItemStack gemLapisGem2, int least_quantity, int most_quantity) {
this(unlocalizedName, mat, gemLapisGem2, 0, least_quantity, most_quantity);
}
protected NetherLapis(String unlocalizedName, Material mat, net.minecraft.item.ItemStack drop) {<|fim▁hole|> this(unlocalizedName, mat, drop, 1, 1);
}
public ItemStack getDropStack(int meta, Random random, int fortune) {
return gemLapisGem;
}
@Override
public int damageDropped(int meta) {
return meta;
}
@Override
public int quantityDropped(int meta, int fortune, Random random) {
if (this.least_quantity >= this.most_quantity)
return this.least_quantity;
return this.least_quantity + random.nextInt(this.most_quantity - this.least_quantity + fortune + 1);
}
}<|fim▁end|> | |
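The fortune-scaled drop count in quantityDropped() follows a common modding pattern: a uniform pick whose upper bound grows with the fortune level. The same arithmetic in a small Python sketch (randint's inclusive upper bound matches Java's nextInt(bound) with bound = most - least + fortune + 1):

import random

def quantity_dropped(least, most, fortune, rng=random):
    # Uniform in [least, most + fortune], mirroring the Java code above.
    if least >= most:
        return least
    return least + rng.randint(0, most - least + fortune)

print(quantity_dropped(1, 2, fortune=3))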
<|file_name|>search.rs<|end_file_name|><|fim▁begin|>extern crate cargo;
extern crate cargotest;
extern crate hamcrest;
extern crate url;
use std::fs::{self, File};
use std::io::prelude::*;
use std::path::PathBuf;
use cargo::util::ProcessBuilder;
use cargotest::support::execs;
use cargotest::support::git::repo;
use cargotest::support::paths;
use hamcrest::assert_that;
use url::Url;
fn registry_path() -> PathBuf { paths::root().join("registry") }
fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() }
fn api_path() -> PathBuf { paths::root().join("api") }
fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() }
fn setup() {
let config = paths::root().join(".cargo/config");
fs::create_dir_all(config.parent().unwrap()).unwrap();
File::create(&config).unwrap().write_all(format!(r#"
[registry]
index = "{reg}"<|fim▁hole|> repo(®istry_path())
.file("config.json", &format!(r#"{{
"dl": "{0}",
"api": "{0}"
}}"#, api()))
.build();
}
fn cargo_process(s: &str) -> ProcessBuilder {
let mut b = cargotest::cargo_process();
b.arg(s);
return b
}
#[test]
fn simple() {
setup();
let contents = r#"{
"crates": [{
"created_at": "2014-11-16T20:17:35Z",
"description": "Design by contract style assertions for Rust",
"documentation": null,
"downloads": 2,
"homepage": null,
"id": "hoare",
"keywords": [],
"license": null,
"links": {
"owners": "/api/v1/crates/hoare/owners",
"reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies",
"version_downloads": "/api/v1/crates/hoare/downloads",
"versions": "/api/v1/crates/hoare/versions"
},
"max_version": "0.1.1",
"name": "hoare",
"repository": "https://github.com/nick29581/libhoare",
"updated_at": "2014-11-20T21:49:21Z",
"versions": null
}],
"meta": {
"total": 1
}
}"#;
let base = api_path().join("api/v1/crates");
// Older versions of curl don't peel off query parameters when looking for
// filenames, so just make both files.
//
// On windows, though, `?` is an invalid character, but we always build curl
// from source there anyway!
File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
if !cfg!(windows) {
File::create(&base.with_file_name("crates?q=postgres&per_page=10")).unwrap()
.write_all(contents.as_bytes()).unwrap();
}
assert_that(cargo_process("search").arg("postgres"),
execs().with_status(0)
.with_stderr("\
[UPDATING] registry `[..]`")
.with_stdout("\
hoare (0.1.1) Design by contract style assertions for Rust"));
}
#[test]
fn multiple_query_params() {
setup();
let contents = r#"{
"crates": [{
"created_at": "2014-11-16T20:17:35Z",
"description": "Design by contract style assertions for Rust",
"documentation": null,
"downloads": 2,
"homepage": null,
"id": "hoare",
"keywords": [],
"license": null,
"links": {
"owners": "/api/v1/crates/hoare/owners",
"reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies",
"version_downloads": "/api/v1/crates/hoare/downloads",
"versions": "/api/v1/crates/hoare/versions"
},
"max_version": "0.1.1",
"name": "hoare",
"repository": "https://github.com/nick29581/libhoare",
"updated_at": "2014-11-20T21:49:21Z",
"versions": null
}],
"meta": {
"total": 1
}
}"#;
let base = api_path().join("api/v1/crates");
// Older versions of curl don't peel off query parameters when looking for
// filenames, so just make both files.
//
// On windows, though, `?` is an invalid character, but we always build curl
// from source there anyway!
File::create(&base).unwrap().write_all(contents.as_bytes()).unwrap();
if !cfg!(windows) {
File::create(&base.with_file_name("crates?q=postgres+sql&per_page=10")).unwrap()
.write_all(contents.as_bytes()).unwrap();
}
assert_that(cargo_process("search").arg("postgres").arg("sql"),
execs().with_status(0)
.with_stderr("\
[UPDATING] registry `[..]`")
.with_stdout("\
hoare (0.1.1) Design by contract style assertions for Rust"));
}
#[test]
fn help() {
assert_that(cargo_process("search").arg("-h"),
execs().with_status(0));
assert_that(cargo_process("help").arg("search"),
execs().with_status(0));
}<|fim▁end|> | "#, reg = registry()).as_bytes()).unwrap();
fs::create_dir_all(&api_path().join("api/v1")).unwrap();
|
<|file_name|>blockio.cc<|end_file_name|><|fim▁begin|>/*
* blockio.cc
*
*
*/
#define _LARGEFILE_SOURCE
#define _FILE_OFFSET_BITS 64
#include "version.h"
#include "blockio.h"
#include "osutils.h"
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <stdint.h>
__ID("@(#) $Id: blockio.cc 2069 2009-02-12 22:53:09Z lyonel $");
ssize_t readlogicalblocks(source & s,
void * buffer,
long long pos, long long count)
{
long long result = 0;
memset(buffer, 0, count*s.blocksize);
/* attempt to read past the end of the section */
if((s.size>0) && ((pos+count)*s.blocksize>s.size)) return 0;
result = lseek(s.fd, s.offset + pos*s.blocksize, SEEK_SET);
if(result == -1) return 0;
result = read(s.fd, buffer, count*s.blocksize);
<|fim▁hole|>}<|fim▁end|> | if(result!=count*s.blocksize)
return 0;
else
return count; |
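readlogicalblocks() refuses reads past the section end, seeks to offset + pos*blocksize, and reports failure unless exactly count blocks arrive. A hedged Python sketch of the same contract, using a file object instead of a raw descriptor:

def read_logical_blocks(f, offset, blocksize, size, pos, count):
    # size == 0 means an unbounded section, as in the C++ code above.
    if size > 0 and (pos + count) * blocksize > size:
        return 0
    f.seek(offset + pos * blocksize)
    data = f.read(count * blocksize)
    return count if len(data) == count * blocksize else 0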
<|file_name|>serializer.py<|end_file_name|><|fim▁begin|>"""
Serialize data to/from JSON
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import, unicode_literals
import datetime
import decimal
import json
import sys
import uuid
from io import BytesIO
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import (
Deserializer as PythonDeserializer, Serializer as PythonSerializer,
)
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import six
from django.utils.timezone import is_aware
class Serializer(PythonSerializer):
"""
Convert a queryset to JSON.
"""
internal_use_only = False
def _init_options(self):
if json.__version__.split('.') >= ['2', '1', '3']:
# Use JS strings to represent Python Decimal instances (ticket #16850)
self.options.update({'use_decimal': False})
self._current = None
self.json_kwargs = self.options.copy()
self.json_kwargs.pop('stream', None)
self.json_kwargs.pop('fields', None)
def start_serialization(self):
self._init_options()
def end_serialization(self):
'''
Do nothing
'''
def end_object(self, obj):
# self._current has the field data
json.dump(self.get_dump_object(obj), self.stream,
cls=DjangoJSONEncoder, **self.json_kwargs)
self.stream.write('\n')
self._current = None
def getvalue(self):
# Grand-parent super
return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
"""
Deserialize a stream or string of JSON data.
"""
if isinstance(stream_or_string, (bytes, six.string_types)):<|fim▁hole|> try:
def line_generator():
for line in stream_or_string:
yield json.loads(line.strip())
for obj in PythonDeserializer(line_generator(), **options):
yield obj
except GeneratorExit:
raise
except Exception as e:
# Map to deserializer error
six.reraise(DeserializationError, DeserializationError(e), sys.exc_info()[2])<|fim▁end|> | stream_or_string = BytesIO(stream_or_string) |
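The serializer above writes one JSON object per line (end_object appends '\n') and the deserializer feeds the stream back line by line. A minimal, framework-free sketch of that line-delimited framing:

import json

records = [{"model": "app.item", "pk": 1, "fields": {"name": "a"}}]
payload = "".join(json.dumps(r) + "\n" for r in records)   # one object per line
decoded = [json.loads(line) for line in payload.splitlines() if line.strip()]
assert decoded == records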
<|file_name|>Webhook.cpp<|end_file_name|><|fim▁begin|>// PabstMirror
// async send msg to a discord webhook
// Requires CPR (curl wrapper) - https://github.com/whoshuu/cpr (and put libcurl.dll in base arma folder)
#include <iostream>
#include <string>
#include <sstream> // std::stringstream is used in postThread
#include <future>
#include "cpr/cpr.h"
constexpr auto VERSION_STR = "v1.0.1";
<|fim▁hole|>extern "C" {
__declspec(dllexport) void __stdcall RVExtension(char* output, int outputSize, const char* function);
__declspec(dllexport) void __stdcall RVExtensionVersion(char* output, int outputSize);
__declspec (dllexport) void __stdcall RVExtensionRegisterCallback(int(*callbackProc)(char const* name, char const* function, char const* data));
}
std::function<int(char const*, char const*, char const*)> callbackPtr = [](char const*, char const*, char const*) { return 0; };
std::future<void> fWorker;
void __stdcall RVExtensionRegisterCallback(int(*callbackProc)(char const* name, char const* function, char const* data)) {
callbackPtr = callbackProc;
}
void postThread(const char * msg) {
cpr::Response r = cpr::Post(
cpr::Url{ "https://discordapp.com/api/webhooks/x/y" }, // don't commit this lol
cpr::Payload{ {"content", msg}, {"username", "POTATO"} }
);
std::stringstream outputStr;
outputStr << "Finished with code [" << r.status_code << "]"; // 200/204 is good, 400 is bad
callbackPtr("POTATO_webhook", "Webhook", outputStr.str().c_str());
}
void __stdcall RVExtensionVersion(char* output, int outputSize) {
strncpy(output, VERSION_STR, outputSize);
}
void __stdcall RVExtension(char* output, int outputSize, const char* function) {
if (!strcmp(function, "version")) {
RVExtensionVersion(output, outputSize);
return;
}
if (fWorker.valid()) { fWorker.wait_for(std::chrono::seconds(1)); } // if worker is busy wait for finish
fWorker = std::async(std::launch::async, &postThread, function); // start async so we don't block arma (~200ms to finish)
strncpy(output, "Dispatched Webhook", outputSize);
}<|fim▁end|> | |
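A hedged Python equivalent of postThread() using the requests package; the URL is a placeholder, and the 200/204-success, 400-failure behavior is taken from the comments above:

import requests

def post_webhook(url, msg):
    # Form-encoded payload with the same fields as the cpr::Payload above.
    r = requests.post(url, data={"content": msg, "username": "POTATO"}, timeout=5)
    return r.status_code  # 200/204 is good, 400 is bad

# post_webhook("https://discordapp.com/api/webhooks/x/y", "hello")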
<|file_name|>hocNotification.js<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * hocNotification
*
*/
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { compose, setPropTypes } from 'recompose';
import { createStructuredSelector } from 'reselect';
import { selectNotifications } from 'features/common_ui/selectors';
const mapStateToProps = createStructuredSelector({
notifications: selectNotifications(),
});
const sliderPropsType = setPropTypes({
notifications: PropTypes.oneOfType([PropTypes.array, PropTypes.object]).isRequired,
});
const hocNotification = compose(connect(mapStateToProps), sliderPropsType);
export default hocNotification;<|fim▁end|> | * |
<|file_name|>ProtocolDaoOjb.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.irb;
import org.kuali.kra.irb.actions.submit.ProtocolSubmission;
import org.kuali.kra.irb.personnel.ProtocolPerson;
import org.kuali.kra.irb.personnel.ProtocolUnit;
import org.kuali.kra.irb.protocol.funding.ProtocolFundingSource;
import org.kuali.kra.irb.protocol.location.ProtocolLocation;
import org.kuali.kra.irb.protocol.research.ProtocolResearchArea;
import org.kuali.kra.protocol.CriteriaFieldHelper;
import org.kuali.kra.protocol.ProtocolBase;
import org.kuali.kra.protocol.ProtocolDaoOjbBase;
import org.kuali.kra.protocol.ProtocolLookupConstants;
import org.kuali.kra.protocol.actions.ProtocolActionBase;
import org.kuali.kra.protocol.actions.submit.ProtocolSubmissionBase;
import org.kuali.kra.protocol.personnel.ProtocolPersonBase;
import org.kuali.kra.protocol.personnel.ProtocolUnitBase;
import org.kuali.rice.krad.service.util.OjbCollectionAware;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
*
* This class is the implementation for ProtocolDao interface.
*/
class ProtocolDaoOjb extends ProtocolDaoOjbBase<Protocol> implements OjbCollectionAware, ProtocolDao {
/**
* The APPROVED_SUBMISSION_STATUS_CODES collection contains the status codes of approved protocol submissions (i.e. 203).
*/
private static final Collection<String> APPROVED_SUBMISSION_STATUS_CODES = Arrays.asList(new String[] {"203"});
/**
* The ACTIVE_PROTOCOL_STATUS_CODES contains the various active status codes for a protocol.
* <li> 200 - Active, open to enrollment
* <li> 201 - Active, closed to enrollment
* <li> 202 - Active, data analysis only
*/
private static final Collection<String> ACTIVE_PROTOCOL_STATUS_CODES = Arrays.asList(new String[] {"200", "201", "202"});
/**
* The REVISION_REQUESTED_PROTOCOL_ACTION_TYPE_CODES contains the protocol action codes for the protocol revision requests.
* <li> 202 - Specific Minor Revision
* <li> 203 - Substantive Revision Requested <|fim▁hole|>
/**
* The REVISION_REQUESTED_PROTOCOL_STATUS_CODES contains the various status codes for protocol revision requests.
* <li> 102 - Specific Minor Revision
* <li> 104 - Substantive Revision Requested
*/
private static final Collection<String> REVISION_REQUESTED_PROTOCOL_STATUS_CODES = Arrays.asList(new String[] {"102", "104"});
private static final Collection<String> PENDING_AMENDMENT_RENEWALS_STATUS_CODES = Arrays.asList(new String[]{"100", "101", "102", "103", "104", "105", "106"});
@Override
protected Collection<String> getApprovedSubmissionStatusCodesHook() {
return APPROVED_SUBMISSION_STATUS_CODES;
}
@Override
protected Collection<String> getActiveProtocolStatusCodesHook() {
return ACTIVE_PROTOCOL_STATUS_CODES;
}
@Override
protected Collection<String> getRevisionRequestedProtocolActionTypeCodesHook() {
return REVISION_REQUESTED_PROTOCOL_ACTION_TYPE_CODES;
}
@Override
protected Collection<String> getRevisionRequestedProtocolStatusCodesHook() {
return REVISION_REQUESTED_PROTOCOL_STATUS_CODES;
}
@Override
protected Class<? extends ProtocolActionBase> getProtocolActionBOClassHoook() {
return org.kuali.kra.irb.actions.ProtocolAction.class;
}
@Override
protected void initRoleListsHook(List<String> investigatorRoles, List<String> personRoles) {
investigatorRoles.add("PI");
investigatorRoles.add("COI");
personRoles.add("SP");
personRoles.add("CA");
personRoles.add("CRC");
}
@Override
protected Collection<String> getPendingAmendmentRenewalsProtocolStatusCodesHook() {
return PENDING_AMENDMENT_RENEWALS_STATUS_CODES;
}
@Override
protected Class<? extends ProtocolBase> getProtocolBOClassHook() {
return Protocol.class;
}
@Override
protected Class<? extends ProtocolPersonBase> getProtocolPersonBOClassHook() {
return ProtocolPerson.class;
}
@Override
protected Class<? extends ProtocolUnitBase> getProtocolUnitBOClassHook() {
return ProtocolUnit.class;
}
@Override
protected Class<? extends ProtocolSubmissionBase> getProtocolSubmissionBOClassHook() {
return ProtocolSubmission.class;
}
@Override
protected List<CriteriaFieldHelper> getCriteriaFields() {
List<CriteriaFieldHelper> criteriaFields = new ArrayList<CriteriaFieldHelper>();
criteriaFields.add(new CriteriaFieldHelper(ProtocolLookupConstants.Property.KEY_PERSON,
ProtocolLookupConstants.Property.PERSON_NAME,
ProtocolPerson.class));
criteriaFields.add(new CriteriaFieldHelper(ProtocolLookupConstants.Property.INVESTIGATOR,
ProtocolLookupConstants.Property.PERSON_NAME,
ProtocolPerson.class));
criteriaFields.add(new CriteriaFieldHelper(ProtocolLookupConstants.Property.FUNDING_SOURCE,
ProtocolLookupConstants.Property.FUNDING_SOURCE_NUMBER,
ProtocolFundingSource.class));
criteriaFields.add(new CriteriaFieldHelper(ProtocolLookupConstants.Property.PERFORMING_ORGANIZATION_ID,
ProtocolLookupConstants.Property.ORGANIZATION_ID,
ProtocolLocation.class));
criteriaFields.add(new CriteriaFieldHelper(ProtocolLookupConstants.Property.RESEARCH_AREA_CODE,
ProtocolLookupConstants.Property.RESEARCH_AREA_CODE,
ProtocolResearchArea.class));
return criteriaFields;
}
}<|fim▁end|> | */
private static final Collection<String> REVISION_REQUESTED_PROTOCOL_ACTION_TYPE_CODES = Arrays.asList(new String[] {"202", "203"}); |
<|file_name|>TypeMismatchException.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2002-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans;
import java.beans.PropertyChangeEvent;
import org.springframework.util.ClassUtils;
/**
* Exception thrown on a type mismatch when trying to set a bean property.<|fim▁hole|> */
public class TypeMismatchException extends PropertyAccessException {
/**
* Error code that a type mismatch error will be registered with.
*/
public static final String ERROR_CODE = "typeMismatch";
private transient Object value;
private Class requiredType;
/**
* Create a new TypeMismatchException.
* @param propertyChangeEvent the PropertyChangeEvent that resulted in the problem
* @param requiredType the required target type
*/
public TypeMismatchException(PropertyChangeEvent propertyChangeEvent, Class requiredType) {
this(propertyChangeEvent, requiredType, null);
}
/**
* Create a new TypeMismatchException.
* @param propertyChangeEvent the PropertyChangeEvent that resulted in the problem
* @param requiredType the required target type (or <code>null</code> if not known)
* @param cause the root cause (may be <code>null</code>)
*/
public TypeMismatchException(PropertyChangeEvent propertyChangeEvent, Class requiredType, Throwable cause) {
super(propertyChangeEvent,
"Failed to convert property value of type [" +
ClassUtils.getDescriptiveType(propertyChangeEvent.getNewValue()) + "]" +
(requiredType != null ?
" to required type [" + ClassUtils.getQualifiedName(requiredType) + "]" : "") +
(propertyChangeEvent.getPropertyName() != null ?
" for property '" + propertyChangeEvent.getPropertyName() + "'" : ""),
cause);
this.value = propertyChangeEvent.getNewValue();
this.requiredType = requiredType;
}
/**
* Create a new TypeMismatchException without PropertyChangeEvent.
* @param value the offending value that couldn't be converted (may be <code>null</code>)
* @param requiredType the required target type (or <code>null</code> if not known)
*/
public TypeMismatchException(Object value, Class requiredType) {
this(value, requiredType, null);
}
/**
* Create a new TypeMismatchException without PropertyChangeEvent.
* @param value the offending value that couldn't be converted (may be <code>null</code>)
* @param requiredType the required target type (or <code>null</code> if not known)
* @param cause the root cause (may be <code>null</code>)
*/
public TypeMismatchException(Object value, Class requiredType, Throwable cause) {
super("Failed to convert value of type [" + ClassUtils.getDescriptiveType(value) + "]" +
(requiredType != null ? " to required type [" + ClassUtils.getQualifiedName(requiredType) + "]" : ""),
cause);
this.value = value;
this.requiredType = requiredType;
}
/**
* Return the offending value (may be <code>null</code>)
*/
public Object getValue() {
return this.value;
}
/**
* Return the required target type, if any.
*/
public Class getRequiredType() {
return this.requiredType;
}
public String getErrorCode() {
return ERROR_CODE;
}
}<|fim▁end|> | *
* @author Rod Johnson
* @author Juergen Hoeller |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code, unused_variables)]
use std::thread::sleep_ms;
mod xcb;
fn main() {
let c = xcb::connect().unwrap();
let iter = c.screen_iterator();
println!("abc");
for screen in iter {
println!("width {}", screen.width());
c.bar(screen);
}
sleep_ms(10000);<|fim▁hole|>}
// vim: filetype=rust:expandtab:shiftwidth=4:tabstop=8:softtabstop=4:textwidth=80<|fim▁end|> | |
<|file_name|>ReponseNormal.java<|end_file_name|><|fim▁begin|>package com.hypebeast.sdk.api.model.hypebeaststore;
import com.google.gson.annotations.SerializedName;
import com.hypebeast.sdk.api.model.Alternative;
import com.hypebeast.sdk.api.model.symfony.taxonomy;
/**
* Created by hesk on 7/1/2015.
*/
public class ReponseNormal extends Alternative {
@SerializedName("products")
public ResponseProductList product_list;
@SerializedName("taxon")<|fim▁hole|><|fim▁end|> | public taxonomy taxon_result;
} |
<|file_name|>footer.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { Page } from '../objects/page';
import { NavigationService } from '../services/navigation.service';
@Component({
selector: 'my-footer',
moduleId: module.id,
templateUrl: '../views/footer.component.html'
})
export class FooterComponent implements OnInit
{
year: string;<|fim▁hole|>
ngOnInit(): void {
//get pages
this.getPages();
//get year
this.year = new Date().getFullYear().toString();
}
getPages(): void {
this.navigationService.getPages().then(pages => this.pages = pages);
}
onSelect(page): void {
this.selectedPage = page;
}
}<|fim▁end|> | pages: Page[];
selectedPage: Page;
constructor(private navigationService: NavigationService) { } |
<|file_name|>test_views.py<|end_file_name|><|fim▁begin|>import json
from django.test.utils import override_settings
import pytest
from pyquery import PyQuery
from fjord.base import views
from fjord.base.tests import (
LocalizingClient,
TestCase,
AnalyzerProfileFactory,
reverse
)
from fjord.base.views import IntentionalException
from fjord.search.tests import ElasticTestCase
class TestAbout(TestCase):
client_class = LocalizingClient
def test_about_view(self):
resp = self.client.get(reverse('about-view'))
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'about.html')
class TestLoginFailure(TestCase):
def test_login_failure_view(self):
resp = self.client.get(reverse('login-failure'))
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'login_failure.html')
resp = self.client.get(reverse('login-failure'), {'mobile': 1})
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'mobile/login_failure.html')
# Note: This needs to be an ElasticTestCase because the view does ES
# stuff.
class TestMonitorView(ElasticTestCase):
def test_monitor_view(self):
"""Tests for the monitor view."""
# TODO: When we add a mocking framework, we can mock this
# properly.
test_memcached = views.test_memcached
try:
with self.settings(
SHOW_STAGE_NOTICE=True,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', # noqa
'LOCATION': ['localhost:11211', 'localhost2:11211']
}
}):
# Mock the test_memcached function so it always returns
# True.
views.test_memcached = lambda host, port: True
# TODO: Replace when we get a mock library.
def mock_rabbitmq():
class MockRabbitMQ(object):
def connect(self):
return True
return lambda *a, **kw: MockRabbitMQ()
views.establish_connection = mock_rabbitmq()
# Request /services/monitor and make sure it returns
# HTTP 200 and that there aren't errors on the page.
resp = self.client.get(reverse('services-monitor'))
errors = [line for line in resp.content.splitlines()
if 'ERROR' in line]
assert resp.status_code == 200, '%s != %s (%s)' % (
resp.status_code, 200, repr(errors))
finally:
views.test_memcached = test_memcached
class TestFileNotFound(TestCase):
client_class = LocalizingClient
def test_404(self):
request = self.client.get('/a/path/that/should/never/exist')
assert request.status_code == 404
self.assertTemplateUsed(request, '404.html')
class TestServerError(TestCase):
@override_settings(SHOW_STAGE_NOTICE=True)
def test_500(self):
with pytest.raises(IntentionalException):
self.client.get('/services/throw-error')
class TestRobots(TestCase):
def test_robots(self):
resp = self.client.get('/robots.txt')
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'robots.txt')
class TestContribute(TestCase):
def test_contribute(self):
resp = self.client.get('/contribute.json')
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'contribute.json')
def test_contribute_if_valid_json(self):
resp = self.client.get('/contribute.json')
# json.loads throws a ValueError when contribute.json is invalid JSON.
json.loads(resp.content)
class TestNewUserView(ElasticTestCase):
def setUp(self):
super(TestNewUserView, self).setUp()
jane = AnalyzerProfileFactory().user
self.jane = jane
def test_redirect_to_dashboard_if_anonymous(self):
# AnonymousUser shouldn't get to the new-user-view, so make
# sure they get redirected to the dashboard.
resp = self.client.get(reverse('new-user-view'), follow=True)
assert resp.status_code == 200
self.assertTemplateNotUsed('new_user.html')
self.assertTemplateUsed('analytics/dashboard.html')
def test_default_next_url(self):
self.client_login_user(self.jane)
resp = self.client.get(reverse('new-user-view'))
assert resp.status_code == 200
self.assertTemplateUsed('new_user.html')
# Pull out next link
pq = PyQuery(resp.content)
next_url = pq('#next-url-link')
assert next_url.attr['href'] == '/en-US/' # this is the dashboard
def test_valid_next_url(self):
self.client_login_user(self.jane)
url = reverse('new-user-view')
resp = self.client.get(url, {
'next': '/ou812' # stretches the meaning of 'valid'
})
assert resp.status_code == 200
self.assertTemplateUsed('new_user.html')
# Pull out the next link; a site-relative path counts as valid,
# so it should be preserved as-is.
pq = PyQuery(resp.content)
next_url = pq('#next-url-link')
assert next_url.attr['href'] == '/ou812'
def test_sanitized_next_url(self):
self.client_login_user(self.jane)
url = reverse('new-user-view')
resp = self.client.get(url, {
'next': 'javascript:prompt%28document.cookie%29'
})
assert resp.status_code == 200
self.assertTemplateUsed('new_user.html')
<|fim▁hole|> assert next_url.attr['href'] == '/en-US/' # this is the dashboard<|fim▁end|> | # Pull out next link which is naughty, so it should have been
# replaced with a dashboard link.
pq = PyQuery(resp.content)
next_url = pq('#next-url-link') |
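The three tests above pin down the policy: a site-relative path survives, anything scheme-bearing falls back to the dashboard. A hypothetical sketch of such a check (not fjord's actual implementation):

from urllib.parse import urlparse

def safe_next_url(candidate, fallback='/en-US/'):
    # Rejects javascript:, http://evil.example, protocol-relative //host, etc.
    if not candidate:
        return fallback
    parsed = urlparse(candidate)
    if parsed.scheme or parsed.netloc or not candidate.startswith('/'):
        return fallback
    return candidate

assert safe_next_url('javascript:prompt(document.cookie)') == '/en-US/'
assert safe_next_url('/ou812') == '/ou812'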
<|file_name|>sign.rs<|end_file_name|><|fim▁begin|>// Copyright 2017-2021 int08h LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//!
//! A multi-step (init-update-finish) interface for Ed25519 signing and verification
//!
use std::fmt;
use std::fmt::Formatter;
use data_encoding::{Encoding, HEXLOWER_PERMISSIVE};
use ring::rand;
use ring::rand::SecureRandom;
use ring::signature::{self, Ed25519KeyPair, KeyPair};
const HEX: Encoding = HEXLOWER_PERMISSIVE;
const INITIAL_BUF_SIZE: usize = 1024;
/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature
#[derive(Debug)]
pub struct Verifier {
pubkey: Vec<u8>,
buf: Vec<u8>,
}
impl Verifier {
pub fn new(pubkey: &[u8]) -> Self {
Verifier {
pubkey: Vec::from(pubkey),
buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn verify(&self, expected_sig: &[u8]) -> bool {
let pk = signature::UnparsedPublicKey::new(&signature::ED25519, &self.pubkey);
pk.verify(&self.buf, expected_sig).is_ok()
}
}
/// A multi-step (init-update-finish) interface for creating an Ed25519 signature
pub struct Signer {
key_pair: Ed25519KeyPair,
buf: Vec<u8>,
}
impl Default for Signer {
fn default() -> Self {
Self::new()
}
}
impl Signer {
pub fn new() -> Self {
let rng = rand::SystemRandom::new();
let mut seed = [0u8; 32];
rng.fill(&mut seed).unwrap();
Signer::from_seed(&seed)
}
pub fn from_seed(seed: &[u8]) -> Self {
Signer {
key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),<|fim▁hole|> buf: Vec::with_capacity(INITIAL_BUF_SIZE),
}
}
pub fn update(&mut self, data: &[u8]) {
self.buf.reserve(data.len());
self.buf.extend_from_slice(data);
}
pub fn sign(&mut self) -> Vec<u8> {
let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();
self.buf.clear();
signature
}
pub fn public_key_bytes(&self) -> &[u8] {
self.key_pair.public_key().as_ref()
}
}
impl fmt::Display for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", HEX.encode(self.public_key_bytes()))
}
}
impl fmt::Debug for Signer {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Signer({}, {:?})",
HEX.encode(self.public_key_bytes()),
self.buf
)
}
}
#[rustfmt::skip] // rustfmt errors on the long signature strings
#[cfg(test)]
mod test {
use super::*;
#[test]
fn verify_ed25519_sig_on_empty_message() {
let pubkey = hex::decode(
"d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a",
).unwrap();
let signature = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let v = Verifier::new(&pubkey);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn verify_ed25519_sig() {
let pubkey = hex::decode(
"c0dac102c4533186e25dc43128472353eaabdb878b152aeb8e001f92d90233a7",
).unwrap();
let message = hex::decode("5f4c8989").unwrap();
let signature = hex::decode(
"124f6fc6b0d100842769e71bd530664d888df8507df6c56dedfdb509aeb93416e26b918d38aa06305df3095697c18b2aa832eaa52edc0ae49fbae5a85e150c07"
).unwrap();
let mut v = Verifier::new(&pubkey);
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
#[test]
fn sign_ed25519_empty_message() {
let seed = hex::decode("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60")
.unwrap();
let expected_sig = hex::decode(
"e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"
).unwrap();
let mut s = Signer::from_seed(&seed);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_ed25519_message() {
let seed = hex::decode("0d4a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = hex::decode("cbc77b").unwrap();
let expected_sig = hex::decode(
"d9868d52c2bebce5f3fa5a79891970f309cb6591e3e1702a70276fa97c24b3a8e58606c38c9758529da50ee31b8219cba45271c689afa60b0ea26c99db19b00c"
).unwrap();
let mut s = Signer::from_seed(&seed);
s.update(&message);
let sig = s.sign();
assert_eq!(sig, expected_sig);
}
#[test]
fn sign_verify_round_trip() {
let seed = hex::decode("334a05b07352a5436e180356da0ae6efa0345ff7fb1572575772e8005ed978e9")
.unwrap();
let message = "Hello world".as_bytes();
let mut signer = Signer::from_seed(&seed);
signer.update(&message);
let signature = signer.sign();
let mut v = Verifier::new(signer.public_key_bytes());
v.update(&message);
let result = v.verify(&signature);
assert_eq!(result, true);
}
}<|fim▁end|> | |
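The Signer/Verifier above buffer every update() and sign once at the end because Ed25519 has no true streaming mode; the whole message is signed in one shot. The same round trip with the pyca/cryptography package, assumed available, for comparison:

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

key = Ed25519PrivateKey.generate()
message = b"Hello world"
signature = key.sign(message)                # one-shot, no init/update/finish
key.public_key().verify(signature, message)  # raises InvalidSignature on mismatch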
<|file_name|>power_of_2.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::basic::integers::PrimitiveInt;
use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::num::conversion::traits::CheckedFrom;
use malachite_base_test_util::generators::{
signed_gen_var_11, unsigned_gen_var_15, unsigned_gen_var_16,
};
use std::panic::catch_unwind;
fn power_of_2_primitive_int_helper<T: PrimitiveInt>() {
let test = |pow, out| {
assert_eq!(T::power_of_2(pow), out);
};
test(0, T::ONE);
test(1, T::TWO);
test(2, T::exact_from(4));
test(3, T::exact_from(8));
}
fn power_of_2_unsigned_helper<T: PrimitiveUnsigned>() {
let test = |pow, out| {
assert_eq!(T::power_of_2(pow), out);
};
test(T::WIDTH - 1, T::ONE << (T::WIDTH - 1));
}
fn power_of_2_primitive_float_helper<T: PrimitiveFloat>() {
let test = |pow, out| {
assert_eq!(T::power_of_2(pow), out);
};
test(0, T::ONE);
test(1, T::TWO);
test(-1, T::from(0.5f32));
test(2, T::from(4.0f32));
test(-2, T::from(0.25f32));
test(T::MIN_EXPONENT, T::MIN_POSITIVE_SUBNORMAL);
}
#[test]
fn test_power_of_2() {
apply_fn_to_primitive_ints!(power_of_2_primitive_int_helper);
apply_fn_to_unsigneds!(power_of_2_unsigned_helper);
apply_fn_to_primitive_floats!(power_of_2_primitive_float_helper);
}
fn power_of_2_unsigned_fail_helper<T: PrimitiveUnsigned>() {
assert_panic!(T::power_of_2(T::WIDTH));
}
fn power_of_2_signed_fail_helper<T: PrimitiveSigned>() {
assert_panic!(T::power_of_2(T::WIDTH - 1));
}
fn power_of_2_primitive_float_fail_helper<T: PrimitiveFloat>() {
assert_panic!(T::power_of_2(T::MAX_EXPONENT + 1));
assert_panic!(T::power_of_2(T::MIN_EXPONENT - 1));
assert_panic!(T::power_of_2(10000));
assert_panic!(T::power_of_2(-10000));
}
#[test]
fn power_of_2_fail() {
apply_fn_to_unsigneds!(power_of_2_unsigned_fail_helper);
apply_fn_to_signeds!(power_of_2_signed_fail_helper);
apply_fn_to_primitive_floats!(power_of_2_primitive_float_fail_helper);
}
fn power_of_2_properties_helper_unsigned<T: PrimitiveUnsigned>() {
unsigned_gen_var_15::<T>().test_properties(|pow| {
let mut n = T::power_of_2(pow);
assert_eq!(n.checked_log_base_2(), Some(pow));
assert!(n.is_power_of_2());
n.clear_bit(pow);
assert_eq!(n, T::ZERO);
});
}
fn power_of_2_properties_helper_signed<
U: CheckedFrom<S> + PrimitiveUnsigned,
S: PrimitiveSigned,
>() {
unsigned_gen_var_16::<S>().test_properties(|pow| {
let mut n = S::power_of_2(pow);
assert_eq!(U::exact_from(n), U::power_of_2(pow));
n.clear_bit(pow);
assert_eq!(n, S::ZERO);
});
}
fn power_of_2_properties_helper_primitive_float<T: PrimitiveFloat>() {
signed_gen_var_11::<T>().test_properties(|pow| {
let n = T::power_of_2(pow);
assert!(n > T::ZERO);
assert!(n.is_power_of_2());
});
}<|fim▁hole|> apply_fn_to_unsigned_signed_pairs!(power_of_2_properties_helper_signed);
apply_fn_to_primitive_floats!(power_of_2_properties_helper_primitive_float);
}<|fim▁end|> |
#[test]
fn power_of_2_properties() {
apply_fn_to_unsigneds!(power_of_2_properties_helper_unsigned); |
<|file_name|>crc.cc<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------------
// Copyright (c) 2013 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/platform.h"
#include "io/stream.h"
#ifndef _MMATH_H_
#include "math/mMath.h"
#endif
//-----------------------------------------------------------------------------
// simple crc function - generates lookup table on first call
static U32 crcTable[256];
static bool crcTableValid;
static void calculateCRCTable()
{
<|fim▁hole|> for(S32 i = 0; i < 256; i++)
{
val = i;
for(S32 j = 0; j < 8; j++)
{
if(val & 0x01)
val = 0xedb88320 ^ (val >> 1);
else
val = val >> 1;
}
crcTable[i] = val;
}
crcTableValid = true;
}
//-----------------------------------------------------------------------------
U32 calculateCRC(const void * buffer, S32 len, U32 crcVal )
{
// check if need to generate the crc table
if(!crcTableValid)
calculateCRCTable();
// now calculate the crc
char * buf = (char*)buffer;
for(S32 i = 0; i < len; i++)
crcVal = crcTable[(crcVal ^ buf[i]) & 0xff] ^ (crcVal >> 8);
return(crcVal);
}
U32 calculateCRCStream(Stream *stream, U32 crcVal )
{
// check if need to generate the crc table
if(!crcTableValid)
calculateCRCTable();
// now calculate the crc
stream->setPosition(0);
S32 len = stream->getStreamSize();
U8 buf[4096];
S32 segCount = (len + 4095) / 4096;
for(S32 j = 0; j < segCount; j++)
{
S32 slen = getMin(4096, len - (j * 4096));
stream->read(slen, buf);
crcVal = calculateCRC(buf, slen, crcVal);
}
stream->setPosition(0);
return(crcVal);
}<|fim▁end|> | U32 val;
|
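The table construction above is the classic reflected CRC-32 with polynomial 0xEDB88320. A Python sketch of the same table and update step, cross-checked against zlib; note that calculateCRC() leaves the initial value and final XOR to the caller:

import zlib

TABLE = []
for i in range(256):
    val = i
    for _ in range(8):
        val = 0xEDB88320 ^ (val >> 1) if val & 1 else val >> 1
    TABLE.append(val)

def crc(data, crc_val=0xFFFFFFFF):
    # Same per-byte update as calculateCRC(); conditioning is external.
    for byte in data:
        crc_val = TABLE[(crc_val ^ byte) & 0xFF] ^ (crc_val >> 8)
    return crc_val

assert crc(b"123456789") ^ 0xFFFFFFFF == zlib.crc32(b"123456789")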
<|file_name|>MenuController.java<|end_file_name|><|fim▁begin|>package org.cleverframe.sys.controller;
import org.cleverframe.common.controller.BaseController;
import org.cleverframe.common.mapper.BeanMapper;
import org.cleverframe.common.persistence.Page;
import org.cleverframe.common.vo.response.AjaxMessage;
import org.cleverframe.sys.SysBeanNames;
import org.cleverframe.sys.SysJspUrlPath;
import org.cleverframe.sys.entity.Menu;
import org.cleverframe.sys.service.MenuService;
import org.cleverframe.sys.vo.request.*;
import org.cleverframe.webui.easyui.data.DataGridJson;
import org.cleverframe.webui.easyui.data.TreeGridNodeJson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.Valid;
import java.util.List;
import java.util.Map;
/**
* Menu management controller.
* <p>
* Author: LiZW <br/>
* Created: 2016-08-24 22:47:29 <br/>
*/
@SuppressWarnings("MVCPathVariableInspection")
@Controller
@RequestMapping(value = "/${base.mvcPath}/sys/menu")
public class MenuController extends BaseController {
@Autowired
@Qualifier(SysBeanNames.MenuService)
private MenuService menuService;
@RequestMapping("/Menu" + VIEW_PAGE_SUFFIX)
public ModelAndView getMenuJsp(HttpServletRequest request, HttpServletResponse response) {
return new ModelAndView(SysJspUrlPath.Menu);
}
/**
* Paged query of menus.
*/
@RequestMapping("/findByPage")
@ResponseBody
public DataGridJson<Menu> findByPage(
HttpServletRequest request,
HttpServletResponse response,
@Valid MenuQueryVo menuQueryVo,
BindingResult bindingResult) {
DataGridJson<Menu> json = new DataGridJson<>();
Page<Menu> page = menuService.findByPage(new Page<>(request, response), menuQueryVo.getMenuType(), menuQueryVo.getName(), menuQueryVo.getOpenMode());
json.setRows(page.getList());
json.setTotal(page.getCount());<|fim▁hole|>
/**
* Query all menu types.
*/
@RequestMapping("/findAllMenuType")
@ResponseBody
public List<Map<String, Object>> findAllMenuType(HttpServletRequest request, HttpServletResponse response) {
return menuService.findAllMenuType();
}
/**
* Query the menu tree.
*/
@RequestMapping("/findMenuTreeByType")
@ResponseBody
public Object findMenuTreeByType(
HttpServletRequest request,
HttpServletResponse response,
@Valid MenuTreeQueryVo menuTreeQueryVo,
BindingResult bindingResult) {
AjaxMessage<String> message = new AjaxMessage<>(true, "查询菜单树成功", null);
if (!beanValidator(bindingResult, message)) {
return message;
}
DataGridJson<TreeGridNodeJson<Menu>> treeGrid = new DataGridJson<>();
List<Menu> menuList = menuService.findMenuByType(menuTreeQueryVo.getMenuType());
for (Menu menu : menuList) {
TreeGridNodeJson<Menu> node = new TreeGridNodeJson<>(menu.getParentId(), menu);
treeGrid.addRow(node);
}
return treeGrid;
}
/**
* Add a menu entry.
*/
@RequestMapping("/addMenu")
@ResponseBody
public AjaxMessage<String> addMenu(
HttpServletRequest request,
HttpServletResponse response,
@Valid MenuAddVo menuAddVo,
BindingResult bindingResult) {
AjaxMessage<String> message = new AjaxMessage<>(true, "新增菜单成功", null);
if (beanValidator(bindingResult, message)) {
Menu menu = BeanMapper.mapper(menuAddVo, Menu.class);
menuService.saveMenu(message, menu);
}
return message;
}
/**
* Update a menu entry.
*/
@RequestMapping("/updateMenu")
@ResponseBody
public AjaxMessage<String> updateMenu(
HttpServletRequest request,
HttpServletResponse response,
@Valid MenuUpdateVo menuUpdateVo,
BindingResult bindingResult) {
AjaxMessage<String> message = new AjaxMessage<>(true, "更新菜单成功", null);
if (beanValidator(bindingResult, message)) {
Menu menu = BeanMapper.mapper(menuUpdateVo, Menu.class);
if (!menuService.updateMenu(menu)) {
message.setSuccess(false);
message.setFailMessage("更新菜单失败");
}
}
return message;
}
/**
* Delete a menu entry.
*/
@RequestMapping("/deleteMenu")
@ResponseBody
public AjaxMessage<String> deleteMenu(
HttpServletRequest request,
HttpServletResponse response,
@Valid MenuDeleteVo menuDeleteVo,
BindingResult bindingResult) {
AjaxMessage<String> message = new AjaxMessage<>(true, "删除菜单成功", null);
if (beanValidator(bindingResult, message)) {
menuService.deleteMenu(message, menuDeleteVo.getId());
}
return message;
}
}<|fim▁end|> | return json;
} |
<|file_name|>test_socket.rs<|end_file_name|><|fim▁begin|>use nix::sys::socket::{InetAddr, UnixAddr, getsockname};
use std::mem;
use std::net::{self, Ipv6Addr, SocketAddr, SocketAddrV6};
use std::path::Path;
use std::str::FromStr;
use std::os::unix::io::RawFd;
use libc::c_char;
#[test]
pub fn test_inetv4_addr_to_sock_addr() {
let actual: net::SocketAddr = FromStr::from_str("127.0.0.1:3000").unwrap();
let addr = InetAddr::from_std(&actual);
match addr {
InetAddr::V4(addr) => {
let ip: u32 = 0x7f000001;
let port: u16 = 3000;
assert_eq!(addr.sin_addr.s_addr, ip.to_be());
assert_eq!(addr.sin_port, port.to_be());
}
_ => panic!("nope"),
}
assert_eq!(addr.to_str(), "127.0.0.1:3000");
let inet = addr.to_std();
assert_eq!(actual, inet);
}
#[test]
pub fn test_inetv6_addr_to_sock_addr() {
let port: u16 = 3000;
let flowinfo: u32 = 1;
let scope_id: u32 = 2;
let ip: Ipv6Addr = "fe80::1".parse().unwrap();
let actual = SocketAddr::V6(SocketAddrV6::new(ip, port, flowinfo, scope_id));
let addr = InetAddr::from_std(&actual);
match addr {
InetAddr::V6(addr) => {
assert_eq!(addr.sin6_port, port.to_be());
assert_eq!(addr.sin6_flowinfo, flowinfo);
assert_eq!(addr.sin6_scope_id, scope_id);
}
_ => panic!("nope"),
}
assert_eq!(actual, addr.to_std());
}
#[test]
pub fn test_path_to_sock_addr() {
let actual = Path::new("/foo/bar");
let addr = UnixAddr::new(actual).unwrap();
let expect: &'static [c_char] = unsafe { mem::transmute(&b"/foo/bar"[..]) };
assert_eq!(&addr.0.sun_path[..8], expect);
assert_eq!(addr.path(), Some(actual));
}
#[test]
pub fn test_getsockname() {
use nix::sys::socket::{socket, AddressFamily, SockType, SockFlag};
use nix::sys::socket::{bind, SockAddr};
use tempdir::TempDir;
let tempdir = TempDir::new("test_getsockname").unwrap();
let sockname = tempdir.path().join("sock");
let sock = socket(AddressFamily::Unix, SockType::Stream, SockFlag::empty(),
0).expect("socket failed");
let sockaddr = SockAddr::new_unix(&sockname).unwrap();
bind(sock, &sockaddr).expect("bind failed");
assert_eq!(sockaddr.to_str(),
getsockname(sock).expect("getsockname failed").to_str());
}
#[test]
pub fn test_socketpair() {
use nix::unistd::{read, write};
use nix::sys::socket::{socketpair, AddressFamily, SockType, SockFlag};
let (fd1, fd2) = socketpair(AddressFamily::Unix, SockType::Stream, 0,
SockFlag::empty())
.unwrap();
write(fd1, b"hello").unwrap();
let mut buf = [0;5];
read(fd2, &mut buf).unwrap();
assert_eq!(&buf[..], b"hello");
}
#[test]
pub fn test_scm_rights() {
use nix::sys::uio::IoVec;
use nix::unistd::{pipe, read, write, close};
use nix::sys::socket::{socketpair, sendmsg, recvmsg,
AddressFamily, SockType, SockFlag,
ControlMessage, CmsgSpace, MsgFlags,
MSG_TRUNC, MSG_CTRUNC};
let (fd1, fd2) = socketpair(AddressFamily::Unix, SockType::Stream, 0,
SockFlag::empty())
.unwrap();
let (r, w) = pipe().unwrap();
let mut received_r: Option<RawFd> = None;
{
let iov = [IoVec::from_slice(b"hello")];
let fds = [r];
let cmsg = ControlMessage::ScmRights(&fds);
assert_eq!(sendmsg(fd1, &iov, &[cmsg], MsgFlags::empty(), None).unwrap(), 5);
close(r).unwrap();
close(fd1).unwrap();
}
{
let mut buf = [0u8; 5];
let iov = [IoVec::from_mut_slice(&mut buf[..])];
let mut cmsgspace: CmsgSpace<[RawFd; 1]> = CmsgSpace::new();
let msg = recvmsg(fd2, &iov, Some(&mut cmsgspace), MsgFlags::empty()).unwrap();<|fim▁hole|> assert_eq!(fd.len(), 1);
received_r = Some(fd[0]);
} else {
panic!("unexpected cmsg");
}
}
assert_eq!(msg.flags & (MSG_TRUNC | MSG_CTRUNC), MsgFlags::empty());
close(fd2).unwrap();
}
let received_r = received_r.expect("Did not receive passed fd");
// Ensure that the received file descriptor works
write(w, b"world").unwrap();
let mut buf = [0u8; 5];
read(received_r, &mut buf).unwrap();
assert_eq!(&buf[..], b"world");
close(received_r).unwrap();
close(w).unwrap();
}
// Test creating and using named unix domain sockets
#[test]
pub fn test_unixdomain() {
use nix::sys::socket::{AddressFamily, SockType, SockFlag};
use nix::sys::socket::{bind, socket, connect, listen, accept, SockAddr};
use nix::unistd::{read, write, close};
use std::thread;
use tempdir::TempDir;
let tempdir = TempDir::new("test_unixdomain").unwrap();
let sockname = tempdir.path().join("sock");
let s1 = socket(AddressFamily::Unix, SockType::Stream,
SockFlag::empty(), 0).expect("socket failed");
let sockaddr = SockAddr::new_unix(&sockname).unwrap();
bind(s1, &sockaddr).expect("bind failed");
listen(s1, 10).expect("listen failed");
let thr = thread::spawn(move || {
let s2 = socket(AddressFamily::Unix, SockType::Stream,
SockFlag::empty(), 0).expect("socket failed");
connect(s2, &sockaddr).expect("connect failed");
write(s2, b"hello").expect("write failed");
close(s2).unwrap();
});
let s3 = accept(s1).expect("accept failed");
let mut buf = [0;5];
read(s3, &mut buf).unwrap();
close(s3).unwrap();
close(s1).unwrap();
thr.join().unwrap();
assert_eq!(&buf[..], b"hello");
}
// Test creating and using named system control sockets
#[cfg(any(target_os = "macos", target_os = "ios"))]
#[test]
pub fn test_syscontrol() {
use nix::{Errno, Error};
use nix::sys::socket::{AddressFamily, SockType, SockFlag};
use nix::sys::socket::{socket, SockAddr};
use nix::sys::socket::SYSPROTO_CONTROL;
let fd = socket(AddressFamily::System, SockType::Datagram, SockFlag::empty(), SYSPROTO_CONTROL).expect("socket failed");
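    // Resolving a registered control name should succeed, while an unknown
    // name should fail with ENOENT.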
let _sockaddr = SockAddr::new_sys_control(fd, "com.apple.net.utun_control", 0).expect("resolving sys_control name failed");
assert_eq!(SockAddr::new_sys_control(fd, "foo.bar.lol", 0).err(), Some(Error::Sys(Errno::ENOENT)));
// requires root privileges
// connect(fd, &sockaddr).expect("connect failed");
}<|fim▁end|> |
for cmsg in msg.cmsgs() {
if let ControlMessage::ScmRights(fd) = cmsg {
assert_eq!(received_r, None); |
<|file_name|>test_pcf.py<|end_file_name|><|fim▁begin|>"""Tests for parabolic cylinder functions.
"""
import numpy as np
from numpy.testing import assert_allclose, assert_equal
import scipy.special as sc
def test_pbwa_segfault():
# Regression test for https://github.com/scipy/scipy/issues/6208.
#
# Data generated by mpmath.
#
w = 1.02276567211316867161
wp = -0.48887053372346189882<|fim▁hole|> # Check that NaN's are returned outside of the range in which the
# implementation is accurate.
pts = [(-6, -6), (-6, 6), (6, -6), (6, 6)]
for p in pts:
assert_equal(sc.pbwa(*p), (np.nan, np.nan))<|fim▁end|> | assert_allclose(sc.pbwa(0, 0), (w, wp), rtol=1e-13, atol=0)
def test_pbwa_nan(): |
<|file_name|>start-service.js<|end_file_name|><|fim▁begin|>Package('{Name}.Services', {
Bootstrap : new Class({
implements: ['exportService', 'importService', 'getUserId'],
initialize : function()
{
this.serviceName = 'bootstrap';
this.exportServices = [this.serviceName];
		this.importServices = [];
SYMPHONY.services.make(this.serviceName, this, this.implements, true);
SAPPHIRE.application.listen('start', this.onStart.bind(this));
SAPPHIRE.application.listen('ready', this.onReady.bind(this));
},
exportService : function(name)
{
this.exportServices.push(name);
},
importService : function(name)
{
this.importServices.push(name);
},
getUserId : function()
{
return this.userId;
},
onStart : function(done)<|fim▁hole|> .then(function(data) {
done();
}.bind(this))
},
onReady : function()
{
return SYMPHONY.application.register({NAME}.appId, this.importServices.unique(), this.exportServices.unique())
.then(function(response)
{
this.userId = response.userReferenceId;
{NAME}.events.fire('start');
}.bind(this))
.done();
},
})
});
new {Name}.Services.Bootstrap();<|fim▁end|> | {
SYMPHONY.remote.hello() |
<|file_name|>inferno-component.node.js<|end_file_name|><|fim▁begin|>/*!
* inferno-component v1.2.2
* (c) 2017 Dominic Gannaway
* Released under the MIT License.
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('inferno')) :
typeof define === 'function' && define.amd ? define(['inferno'], factory) :
(global.Inferno = global.Inferno || {}, global.Inferno.Component = factory(global.Inferno));
}(this, (function (inferno) { 'use strict';
var ERROR_MSG = 'a runtime error occurred! Use Inferno in a development environment to find the error.';
var isBrowser = typeof window !== 'undefined' && window.document;
// this is MUCH faster than .constructor === Array and instanceof Array
// in Node 7 and the later versions of V8, slower in older versions though
var isArray = Array.isArray;
function isStringOrNumber(obj) {
var type = typeof obj;
return type === 'string' || type === 'number';
}
function isNullOrUndef(obj) {
return isUndefined(obj) || isNull(obj);
}
function isInvalid(obj) {
return isNull(obj) || obj === false || isTrue(obj) || isUndefined(obj);
}
function isFunction(obj) {
return typeof obj === 'function';
}
function isNull(obj) {
return obj === null;
}
function isTrue(obj) {
return obj === true;
}
function isUndefined(obj) {
return obj === undefined;
}
function throwError(message) {
if (!message) {
message = ERROR_MSG;
}
throw new Error(("Inferno Error: " + message));
}
var Lifecycle = function Lifecycle() {
this.listeners = [];
this.fastUnmount = true;
};
Lifecycle.prototype.addListener = function addListener (callback) {
this.listeners.push(callback);
};
Lifecycle.prototype.trigger = function trigger () {
var this$1 = this;
for (var i = 0; i < this.listeners.length; i++) {
this$1.listeners[i]();
}
};
var noOp = ERROR_MSG;
if (process.env.NODE_ENV !== 'production') {
noOp = 'Inferno Error: Can only update a mounted or mounting component. This usually means you called setState() or forceUpdate() on an unmounted component. This is a no-op.';
}
var componentCallbackQueue = new Map();
// when a components root VNode is also a component, we can run into issues
// this will recursively look for vNode.parentNode if the VNode is a component
function updateParentComponentVNodes(vNode, dom) {
if (vNode.flags & 28 /* Component */) {
var parentVNode = vNode.parentVNode;
if (parentVNode) {
parentVNode.dom = dom;
updateParentComponentVNodes(parentVNode, dom);
}
}
}
// this is in shapes too, but we don't want to import from shapes as it will pull in a duplicate of createVNode
function createVoidVNode() {
return inferno.createVNode(4096 /* Void */);
}
function createTextVNode(text) {
return inferno.createVNode(1 /* Text */, null, null, text);
}
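// Batches setState()/forceUpdate() callbacks per component: the first call
// schedules a microtask via Promise.resolve() that applies the pending state
// once and then runs every callback queued in the meantime.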
function addToQueue(component, force, callback) {
// TODO this function needs to be revised and improved on
var queue = componentCallbackQueue.get(component);
if (!queue) {
queue = [];
componentCallbackQueue.set(component, queue);
Promise.resolve().then(function () {
componentCallbackQueue.delete(component);
applyState(component, force, function () {
for (var i = 0; i < queue.length; i++) {
queue[i]();
}
});
});
}
if (callback) {
queue.push(callback);
}
}
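// Merges newState (a plain object or an updater function) into _pendingState,
// then either applies it synchronously or defers it to the microtask queue.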
function queueStateChanges(component, newState, callback, sync) {
if (isFunction(newState)) {
newState = newState(component.state, component.props, component.context);
}
for (var stateKey in newState) {
component._pendingState[stateKey] = newState[stateKey];
}
if (!component._pendingSetState && isBrowser) {
if (sync || component._blockRender) {
component._pendingSetState = true;
applyState(component, false, callback);
}
else {
addToQueue(component, false, callback);
}
}
else {
component.state = Object.assign({}, component.state, component._pendingState);
component._pendingState = {};
}
}
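// Flushes pending state: runs the component's update cycle, patches the last
// rendered input against the new one, and keeps any parent component VNodes
// pointing at the freshly rendered DOM node.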
function applyState(component, force, callback) {
if ((!component._deferSetState || force) && !component._blockRender && !component._unmounted) {
component._pendingSetState = false;
var pendingState = component._pendingState;
var prevState = component.state;
var nextState = Object.assign({}, prevState, pendingState);
var props = component.props;
var context = component.context;
component._pendingState = {};
var nextInput = component._updateComponent(prevState, nextState, props, props, context, force, true);
var didUpdate = true;
if (isInvalid(nextInput)) {
nextInput = createVoidVNode();
}
else if (nextInput === inferno.NO_OP) {
nextInput = component._lastInput;
didUpdate = false;
}
else if (isStringOrNumber(nextInput)) {
nextInput = createTextVNode(nextInput);
}
else if (isArray(nextInput)) {
if (process.env.NODE_ENV !== 'production') {
throwError('a valid Inferno VNode (or null) must be returned from a component render. You may have returned an array or an invalid object.');
}
throwError();
}
var lastInput = component._lastInput;
var vNode = component._vNode;
var parentDom = (lastInput.dom && lastInput.dom.parentNode) || (lastInput.dom = vNode.dom);
component._lastInput = nextInput;
if (didUpdate) {
var subLifecycle = component._lifecycle;
if (!subLifecycle) {
subLifecycle = new Lifecycle();
}
else {
subLifecycle.listeners = [];
}
component._lifecycle = subLifecycle;
var childContext = component.getChildContext();
if (!isNullOrUndef(childContext)) {
childContext = Object.assign({}, context, component._childContext, childContext);
}
else {
childContext = Object.assign({}, context, component._childContext);
}
component._patch(lastInput, nextInput, parentDom, subLifecycle, childContext, component._isSVG, false);
subLifecycle.trigger();
component.componentDidUpdate(props, prevState);
inferno.options.afterUpdate && inferno.options.afterUpdate(vNode);
}
var dom = vNode.dom = nextInput.dom;
var componentToDOMNodeMap = component._componentToDOMNodeMap;
componentToDOMNodeMap && componentToDOMNodeMap.set(component, nextInput.dom);
updateParentComponentVNodes(vNode, dom);
if (!isNullOrUndef(callback)) {
callback();
}
}
else if (!isNullOrUndef(callback)) {
callback();
}
}
var Component$1 = function Component(props, context) {
this.state = {};
this.refs = {};
this._blockRender = false;
this._ignoreSetState = false;
this._blockSetState = false;
this._deferSetState = false;
this._pendingSetState = false;
this._pendingState = {};
this._lastInput = null;
this._vNode = null;
this._unmounted = true;
this._lifecycle = null;
this._childContext = null;
this._patch = null;
this._isSVG = false;
this._componentToDOMNodeMap = null;
/** @type {object} */
this.props = props || inferno.EMPTY_OBJ;
/** @type {object} */
this.context = context || {};
};
Component$1.prototype.render = function render (nextProps, nextState, nextContext) {
};
Component$1.prototype.forceUpdate = function forceUpdate (callback) {
if (this._unmounted) {
return;
}
isBrowser && applyState(this, true, callback);
};
Component$1.prototype.setState = function setState (newState, callback) {
if (this._unmounted) {
return;
}
if (!this._blockSetState) {
if (!this._ignoreSetState) {
queueStateChanges(this, newState, callback, false);
}
}
else {
if (process.env.NODE_ENV !== 'production') {
throwError('cannot update state via setState() in componentWillUpdate().');
}
throwError();
}
};
Component$1.prototype.setStateSync = function setStateSync (newState) {
if (this._unmounted) {
return;
}
if (!this._blockSetState) {
if (!this._ignoreSetState) {
queueStateChanges(this, newState, null, true);
}
}
else {
if (process.env.NODE_ENV !== 'production') {
throwError('cannot update state via setState() in componentWillUpdate().');
}
throwError();
}
};
Component$1.prototype.componentWillMount = function componentWillMount () {
};
Component$1.prototype.componentDidUpdate = function componentDidUpdate (prevProps, prevState, prevContext) {
};
Component$1.prototype.shouldComponentUpdate = function shouldComponentUpdate (nextProps, nextState, context) {
return true;
};
Component$1.prototype.componentWillReceiveProps = function componentWillReceiveProps (nextProps, context) {
};
Component$1.prototype.componentWillUpdate = function componentWillUpdate (nextProps, nextState, nextContext) {
};
Component$1.prototype.getChildContext = function getChildContext () {
};
Component$1.prototype._updateComponent = function _updateComponent (prevState, nextState, prevProps, nextProps, context, force, fromSetState) {
if (this._unmounted === true) {
if (process.env.NODE_ENV !== 'production') {
throwError(noOp);
}
throwError();
}
if ((prevProps !== nextProps || nextProps === inferno.EMPTY_OBJ) || prevState !== nextState || force) {
if (prevProps !== nextProps || nextProps === inferno.EMPTY_OBJ) {
if (!fromSetState) {
this._blockRender = true;
this.componentWillReceiveProps(nextProps, context);
this._blockRender = false;
}
if (this._pendingSetState) {
nextState = Object.assign({}, nextState, this._pendingState);
this._pendingSetState = false;
this._pendingState = {};
}
}
var shouldUpdate = this.shouldComponentUpdate(nextProps, nextState, context);
if (shouldUpdate !== false || force) {
this._blockSetState = true;
this.componentWillUpdate(nextProps, nextState, context);
this._blockSetState = false;
this.props = nextProps;
var state = this.state = nextState;
this.context = context;
inferno.options.beforeRender && inferno.options.beforeRender(this);
var render = this.render(nextProps, state, context);
inferno.options.afterRender && inferno.options.afterRender(this);
return render;
}
}
return inferno.NO_OP;
};
return Component$1;<|fim▁hole|><|fim▁end|> |
}))); |
<|file_name|>test_linked_list.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import unittest
from linked_list import (delete_node, list_cycle, remove_elements,
reverse_list)
from public import ListNode
class TestLinkedList(unittest.TestCase):
def test_delete_node(self):
so = delete_node.Solution()
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
head.next.next.next = ListNode(4)
so.deleteNode(head.next)
self.assertEqual(head.next.val, 3)
def test_has_cycle(self):
so = list_cycle.Solution()
self.assertFalse(so.hasCycle(None))
head = ListNode(1)
self.assertFalse(so.hasCycle(head))
head.next = head
self.assertTrue(so.hasCycle(head))
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertFalse(so.hasCycle(head))
head.next.next.next = head
self.assertTrue(so.hasCycle(head))
def test_detect_cycle(self):
so = list_cycle.Solution()
head = ListNode(1)
self.assertFalse(so.detectCycle(head))
self.assertFalse(so.detectCycle(None))
head.next = ListNode(2)
self.assertFalse(so.detectCycle(head))
cross = ListNode(3)
head.next.next = cross
head.next.next.next = ListNode(4)
head.next.next.next.next = ListNode(5)
head.next.next.next.next.next = cross
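        # The list is now 1 -> 2 -> 3 -> 4 -> 5 -> back to 3, so detectCycle
        # must return the node where the cycle begins (the "cross" node).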
self.assertEqual(so.detectCycle(head), cross)
def test_remove_elements(self):
so = remove_elements.Solution()
self.assertFalse(so.removeElements(None, 0))
<|fim▁hole|> head.next = ListNode(2)
head.next.next = ListNode(2)
head.next.next.next = ListNode(3)
head.next.next.next.next = ListNode(4)
head = so.removeElements(head, 1)
self.assertEqual(head.val, 2)
head = so.removeElements(head, 2)
self.assertEqual(head.val, 3)
head = so.removeElements(head, 4)
self.assertFalse(head.next)
def test_reverse_linked_list(self):
so = reverse_list.Solution()
self.assertFalse(so.reverseList_iteratively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_iteratively(head).val, 3)
self.assertFalse(so.reverseList_recursively(None))
head = ListNode(1)
head.next = ListNode(2)
head.next.next = ListNode(3)
self.assertEqual(so.reverseList_recursively(head).val, 3)<|fim▁end|> | head = ListNode(1) |
<|file_name|>rpath.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::HashSet;
use std::env;
use std::path::{Path, PathBuf};
use std::fs;
use syntax::ast;
pub struct RPathConfig<'a> {
pub used_crates: Vec<(ast::CrateNum, Option<PathBuf>)>,
pub out_filename: PathBuf,
pub is_like_osx: bool,
pub has_rpath: bool,
pub get_install_prefix_lib_path: &'a mut FnMut() -> PathBuf,
}
pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
// No rpath on windows
if !config.has_rpath {
return Vec::new();
}
let mut flags = Vec::new();
debug!("preparing the RPATH!");
let libs = config.used_crates.clone();
let libs = libs.into_iter().filter_map(|(_, l)| l).collect::<Vec<_>>();
let rpaths = get_rpaths(config, &libs[..]);
flags.push_all(&rpaths_to_flags(&rpaths[..]));
flags
}
fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
let mut ret = Vec::new();
for rpath in rpaths {
ret.push(format!("-Wl,-rpath,{}", &(*rpath)));
}
return ret;
}
fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec<String> {
debug!("output: {:?}", config.out_filename.display());
debug!("libs:");
for libpath in libs {
debug!(" {:?}", libpath.display());
}
// Use relative paths to the libraries. Binaries can be moved
// as long as they maintain the relative relationship to the
// crates they depend on.
let rel_rpaths = get_rpaths_relative_to_output(config, libs);
// And a final backup rpath to the global library location.
let fallback_rpaths = vec!(get_install_prefix_rpath(config));
fn log_rpaths(desc: &str, rpaths: &[String]) {
debug!("{} rpaths:", desc);
for rpath in rpaths {
debug!(" {}", *rpath);
}
}
log_rpaths("relative", &rel_rpaths[..]);
log_rpaths("fallback", &fallback_rpaths[..]);
let mut rpaths = rel_rpaths;
rpaths.push_all(&fallback_rpaths[..]);
// Remove duplicates
let rpaths = minimize_rpaths(&rpaths[..]);
return rpaths;
}
fn get_rpaths_relative_to_output(config: &mut RPathConfig,
libs: &[PathBuf]) -> Vec<String> {
libs.iter().map(|a| get_rpath_relative_to_output(config, a)).collect()
}
<|fim▁hole|> } else {
"$ORIGIN"
};
let cwd = env::current_dir().unwrap();
let mut lib = fs::canonicalize(&cwd.join(lib)).unwrap_or(cwd.join(lib));
lib.pop();
let mut output = cwd.join(&config.out_filename);
output.pop();
let output = fs::canonicalize(&output).unwrap_or(output);
let relative = path_relative_from(&lib, &output)
.expect(&format!("couldn't create relative path from {:?} to {:?}", output, lib));
// FIXME (#9639): This needs to handle non-utf8 paths
format!("{}/{}", prefix,
relative.to_str().expect("non-utf8 component in path"))
}
// This routine is adapted from the *old* Path's `path_relative_from`
// function, which works differently from the new `relative_from` function.
// In particular, this handles the case on unix where both paths are
// absolute but with only the root as the common directory.
fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
use std::path::Component;
if path.is_absolute() != base.is_absolute() {
if path.is_absolute() {
Some(PathBuf::from(path))
} else {
None
}
} else {
let mut ita = path.components();
let mut itb = base.components();
let mut comps: Vec<Component> = vec![];
loop {
match (ita.next(), itb.next()) {
(None, None) => break,
(Some(a), None) => {
comps.push(a);
comps.extend(ita.by_ref());
break;
}
(None, _) => comps.push(Component::ParentDir),
(Some(a), Some(b)) if comps.is_empty() && a == b => (),
(Some(a), Some(b)) if b == Component::CurDir => comps.push(a),
(Some(_), Some(b)) if b == Component::ParentDir => return None,
(Some(a), Some(_)) => {
comps.push(Component::ParentDir);
for _ in itb {
comps.push(Component::ParentDir);
}
comps.push(a);
comps.extend(ita.by_ref());
break;
}
}
}
Some(comps.iter().map(|c| c.as_os_str()).collect())
}
}
fn get_install_prefix_rpath(config: &mut RPathConfig) -> String {
let path = (config.get_install_prefix_lib_path)();
let path = env::current_dir().unwrap().join(&path);
// FIXME (#9639): This needs to handle non-utf8 paths
path.to_str().expect("non-utf8 component in rpath").to_string()
}
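// Deduplicate the rpath list while preserving the order of first occurrences.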
fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
let mut set = HashSet::new();
let mut minimized = Vec::new();
for rpath in rpaths {
if set.insert(&rpath[..]) {
minimized.push(rpath.clone());
}
}
minimized
}
#[cfg(all(unix, test))]
mod tests {
use super::{RPathConfig};
use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
use std::path::{Path, PathBuf};
#[test]
fn test_rpaths_to_flags() {
let flags = rpaths_to_flags(&[
"path1".to_string(),
"path2".to_string()
]);
assert_eq!(flags,
["-Wl,-rpath,path1",
"-Wl,-rpath,path2"]);
}
#[test]
fn test_minimize1() {
let res = minimize_rpaths(&[
"rpath1".to_string(),
"rpath2".to_string(),
"rpath1".to_string()
]);
assert!(res == [
"rpath1",
"rpath2",
]);
}
#[test]
fn test_minimize2() {
let res = minimize_rpaths(&[
"1a".to_string(),
"2".to_string(),
"2".to_string(),
"1a".to_string(),
"4a".to_string(),
"1a".to_string(),
"2".to_string(),
"3".to_string(),
"4a".to_string(),
"3".to_string()
]);
assert!(res == [
"1a",
"2",
"4a",
"3",
]);
}
#[test]
fn test_rpath_relative() {
if cfg!(target_os = "macos") {
let config = &mut RPathConfig {
used_crates: Vec::new(),
has_rpath: true,
is_like_osx: true,
out_filename: PathBuf::from("bin/rustc"),
get_install_prefix_lib_path: &mut || panic!(),
};
let res = get_rpath_relative_to_output(config,
Path::new("lib/libstd.so"));
assert_eq!(res, "@loader_path/../lib");
} else {
let config = &mut RPathConfig {
used_crates: Vec::new(),
out_filename: PathBuf::from("bin/rustc"),
get_install_prefix_lib_path: &mut || panic!(),
has_rpath: true,
is_like_osx: false,
};
let res = get_rpath_relative_to_output(config,
Path::new("lib/libstd.so"));
assert_eq!(res, "$ORIGIN/../lib");
}
}
}<|fim▁end|> | fn get_rpath_relative_to_output(config: &mut RPathConfig, lib: &Path) -> String {
// Mac doesn't appear to support $ORIGIN
let prefix = if config.is_like_osx {
"@loader_path" |
<|file_name|>token.py<|end_file_name|><|fim▁begin|>"""
Token class definition
"""
class Token(object):
"""Represents a token with type and value"""
def __init__(self, type, value):
self._type = type
self._value = value
<|fim▁hole|> def type(self):
return self._type
@property
def value(self):
return self._value<|fim▁end|> | @property |
<|file_name|>constants.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import sys
from inspect import getmembers
from ._hoedown import lib
def _set_constants():
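    # Copy every integer HOEDOWN_* constant exposed by the C library into this
    # module's namespace with the prefix stripped (e.g. HOEDOWN_EXT_TABLES
    # becomes EXT_TABLES).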
is_int = lambda n: isinstance(n, int)
for name, value in getmembers(lib, is_int):
if not name.startswith('HOEDOWN_'):
continue
setattr(sys.modules[__name__], name[8:], value)
if not hasattr(sys.modules[__name__], 'EXT_TABLES'):<|fim▁hole|><|fim▁end|> | _set_constants() |
<|file_name|>login.js<|end_file_name|><|fim▁begin|>Template.login.events({
	'submit form': function(event){
event.preventDefault();
var email = event.target.email.value;
var password = event.target.password.value;
//error handling
Meteor.loginWithPassword(email, password, function(error){
if (error) {
alert(error.reason);
} else{
Router.go('/');
};<|fim▁hole|><|fim▁end|> | });
}
}); |
<|file_name|>alg_tower_of_hanoi.py<|end_file_name|><|fim▁begin|>"""The tower of Hanoi."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def tower_of_hanoi(height, from_pole, to_pole, with_pole, counter):
"""Tower of Hanoi.
Time complexity: T(1) = 1, T(n) = 2T(n - 1) + 1 => O(2^n).
Space complexity: O(1).
"""
if height == 1:
counter[0] += 1
print('{0} -> {1}'.format(from_pole, to_pole))
else:
tower_of_hanoi(height - 1, from_pole, with_pole, to_pole, counter)
tower_of_hanoi(1, from_pole, to_pole, with_pole, counter)
tower_of_hanoi(height - 1, with_pole, to_pole, from_pole, counter)
def main():
from_pole = 'A'
to_pole = 'B'
with_pole = 'C'
height = 1
counter = [0]
print('height: {}'.format(height))
tower_of_hanoi(height, from_pole, to_pole, with_pole, counter)
print('counter: {}'.format(counter[0]))
height = 2
counter = [0]
print('height: {}'.format(height))
tower_of_hanoi(height, from_pole, to_pole, with_pole, counter)
print('counter: {}'.format(counter[0]))
<|fim▁hole|> height = 5
counter = [0]
print('height: {}'.format(height))
tower_of_hanoi(height, from_pole, to_pole, with_pole, counter)
print('counter: {}'.format(counter[0]))
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>test2.out.rs<|end_file_name|><|fim▁begin|>fn should_format() {
println!("Test", "Test2", "Test3");
}<|fim▁hole|>
fn shouldnt_format() {
println!("Test");
println!("Test");
println!("Test");
println!("Test");
}
fn should_format() {
println!("Test", "Test2", "Test3");
}<|fim▁end|> | |
<|file_name|>index.transform.js<|end_file_name|><|fim▁begin|>'use strict';
var BigNumber = require('../../type/BigNumber');
var Range = require('../../type/Range');
var Index = require('../../type/Index');
var isNumber = require('../../util/number').isNumber;
/**
* Attach a transform function to math.index
* Adds a property transform containing the transform function.
*
* This transform creates a one-based index instead of a zero-based index
* @param {Object} math
*/
module.exports = function (math) {
var transform = function () {
var args = [];
for (var i = 0, ii = arguments.length; i < ii; i++) {
var arg = arguments[i];
// change from one-based to zero based, and convert BigNumber to number
if (arg instanceof Range) {
arg.start--;
arg.end -= (arg.step > 0 ? 0 : 2);
}
else if (isNumber(arg)) {
arg--;
}
else if (arg instanceof BigNumber) {
arg = arg.toNumber() - 1;
}
else {
throw new TypeError('Ranges must be a Number or Range');
}
args[i] = arg;
}
var res = new Index();
Index.apply(res, args);<|fim▁hole|>
return transform;
};<|fim▁end|> | return res;
};
math.index.transform = transform; |
<|file_name|>test_ccode.py<|end_file_name|><|fim▁begin|>from sympy.core import pi, oo, symbols, Function, Rational, Integer, GoldenRatio, EulerGamma, Catalan, Lambda, Dummy, Eq
from sympy.functions import Piecewise, sin, cos, Abs, exp, ceiling, sqrt, gamma
from sympy.utilities.pytest import raises
from sympy.printing.ccode import CCodePrinter
from sympy.utilities.lambdify import implemented_function
from sympy.tensor import IndexedBase, Idx
# import test
from sympy import ccode
x, y, z = symbols('x,y,z')
g = Function('g')
def test_printmethod():
class fabs(Abs):
def _ccode(self, printer):
return "fabs(%s)" % printer._print(self.args[0])
assert ccode(fabs(x)) == "fabs(x)"
def test_ccode_sqrt():
assert ccode(sqrt(x)) == "sqrt(x)"
assert ccode(x**0.5) == "sqrt(x)"
assert ccode(sqrt(x)) == "sqrt(x)"
def test_ccode_Pow():
assert ccode(x**3) == "pow(x, 3)"
assert ccode(x**(y**3)) == "pow(x, pow(y, 3))"
assert ccode(1/(g(x)*3.5)**(x - y**x)/(x**2 + y)) == \
"pow(3.5*g(x), -x + pow(y, x))/(pow(x, 2) + y)"
assert ccode(x**-1.0) == '1.0/x'
assert ccode(x**Rational(2, 3)) == 'pow(x, 2.0L/3.0L)'
_cond_cfunc = [(lambda base, exp: exp.is_integer, "dpowi"),
(lambda base, exp: not exp.is_integer, "pow")]
assert ccode(x**3, user_functions={'Pow': _cond_cfunc}) == 'dpowi(x, 3)'
assert ccode(x**3.2, user_functions={'Pow': _cond_cfunc}) == 'pow(x, 3.2)'
def test_ccode_constants_mathh():
assert ccode(exp(1)) == "M_E"
assert ccode(pi) == "M_PI"
assert ccode(oo) == "HUGE_VAL"
assert ccode(-oo) == "-HUGE_VAL"
def test_ccode_constants_other():
assert ccode(2*GoldenRatio) == "double const GoldenRatio = 1.61803398874989;\n2*GoldenRatio"
assert ccode(
2*Catalan) == "double const Catalan = 0.915965594177219;\n2*Catalan"
assert ccode(2*EulerGamma) == "double const EulerGamma = 0.577215664901533;\n2*EulerGamma"
def test_ccode_Rational():
assert ccode(Rational(3, 7)) == "3.0L/7.0L"
assert ccode(Rational(18, 9)) == "2"
assert ccode(Rational(3, -7)) == "-3.0L/7.0L"
assert ccode(Rational(-3, -7)) == "3.0L/7.0L"
assert ccode(x + Rational(3, 7)) == "x + 3.0L/7.0L"
assert ccode(Rational(3, 7)*x) == "(3.0L/7.0L)*x"
def test_ccode_Integer():
assert ccode(Integer(67)) == "67"
assert ccode(Integer(-1)) == "-1"
def test_ccode_functions():
assert ccode(sin(x) ** cos(x)) == "pow(sin(x), cos(x))"
def test_ccode_inline_function():
x = symbols('x')
g = implemented_function('g', Lambda(x, 2*x))
assert ccode(g(x)) == "2*x"
g = implemented_function('g', Lambda(x, 2*x/Catalan))
assert ccode(
g(x)) == "double const Catalan = %s;\n2*x/Catalan" % Catalan.n()
A = IndexedBase('A')
i = Idx('i', symbols('n', integer=True))
g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x)))
assert ccode(g(A[i]), assign_to=A[i]) == (
"for (int i=0; i<n; i++){\n"
" A[i] = (A[i] + 1)*(A[i] + 2)*A[i];\n"
"}"
)
def test_ccode_exceptions():
assert ccode(ceiling(x)) == "ceil(x)"
assert ccode(Abs(x)) == "fabs(x)"
assert ccode(gamma(x)) == "tgamma(x)"
def test_ccode_user_functions():
x = symbols('x', integer=False)
n = symbols('n', integer=True)
custom_functions = {
"ceiling": "ceil",
"Abs": [(lambda x: not x.is_integer, "fabs"), (lambda x: x.is_integer, "abs")],
}
assert ccode(ceiling(x), user_functions=custom_functions) == "ceil(x)"
assert ccode(Abs(x), user_functions=custom_functions) == "fabs(x)"
assert ccode(Abs(n), user_functions=custom_functions) == "abs(n)"
def test_ccode_boolean():
assert ccode(x & y) == "x && y"
assert ccode(x | y) == "x || y"
assert ccode(~x) == "!x"
assert ccode(x & y & z) == "x && y && z"
assert ccode(x | y | z) == "x || y || z"
assert ccode((x & y) | z) == "z || x && y"
assert ccode((x | y) & z) == "z && (x || y)"
def test_ccode_Piecewise():
p = ccode(Piecewise((x, x < 1), (x**2, True)))
s = \
"""\
if (x < 1) {
x
}
else {
pow(x, 2)
}\
"""
assert p == s
def test_ccode_Piecewise_deep():
p = ccode(2*Piecewise((x, x < 1), (x**2, True)))
s = \
"""\
2*((x < 1) ? (
x
)
: (
pow(x, 2)
) )\
"""
assert p == s
def test_ccode_settings():
raises(TypeError, lambda: ccode(sin(x), method="garbage"))
def test_ccode_Indexed():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o = symbols('n m o', integer=True)
i, j, k = Idx('i', n), Idx('j', m), Idx('k', o)
p = CCodePrinter()
p._not_c = set()
x = IndexedBase('x')[j]
assert p._print_Indexed(x) == 'x[j]'
A = IndexedBase('A')[i, j]
assert p._print_Indexed(A) == 'A[%s]' % (m*i+j)
B = IndexedBase('B')[i, j, k]
assert p._print_Indexed(B) == 'B[%s]' % (i*o*m+j*o+k)
assert p._not_c == set()
def test_ccode_Indexed_without_looking_for_contraction():
len_y = 5
y = IndexedBase('y', shape=(len_y,))
x = IndexedBase('x', shape=(len_y,))
Dy = IndexedBase('Dy', shape=(len_y-1,))
i = Idx('i', len_y-1)
e=Eq(Dy[i], (y[i+1]-y[i])/(x[i+1]-x[i]))
code0 = ccode(e.rhs, assign_to=e.lhs, contract=False)
assert code0 == 'Dy[i] = (y[%s] - y[i])/(x[%s] - x[i]);' % (i + 1, i + 1)
def test_ccode_loops_matrix_vector():
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = x[j]*A[%s] + y[i];\n' % (i*n + j) +\
' }\n'
'}'
)
c = ccode(A[i, j]*x[j], assign_to=y[i])
assert c == s<|fim▁hole|> # the following line could also be
# [Dummy(s, integer=True) for s in 'im']
# or [Dummy(integer=True) for s in 'im']
i, m = symbols('i m', integer=True, cls=Dummy)
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx(i, m)
expected = (
'for (int i_%(icount)i=0; i_%(icount)i<m_%(mcount)i; i_%(icount)i++){\n'
' y[i_%(icount)i] = x[i_%(icount)i];\n'
'}'
) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index}
code = ccode(x[i], assign_to=y[i])
assert code == expected
def test_ccode_loops_add():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
z = IndexedBase('z')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = x[i] + z[i];\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = x[j]*A[%s] + y[i];\n' % (i*n + j) +\
' }\n'
'}'
)
c = ccode(A[i, j]*x[j] + x[i] + z[i], assign_to=y[i])
assert c == s
def test_ccode_loops_multiple_contractions():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' for (int k=0; k<o; k++){\n'
' for (int l=0; l<p; l++){\n'
' y[i] = y[i] + b[%s]*a[%s];\n' % (j*o*p + k*p + l, i*n*o*p + j*o*p + k*p + l) +\
' }\n'
' }\n'
' }\n'
'}'
)
c = ccode(b[j, k, l]*a[i, j, k, l], assign_to=y[i])
assert c == s
def test_ccode_loops_addfactor():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
c = IndexedBase('c')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
l = Idx('l', p)
s = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' for (int k=0; k<o; k++){\n'
' for (int l=0; l<p; l++){\n'
' y[i] = (a[%s] + b[%s])*c[%s] + y[i];\n' % (i*n*o*p + j*o*p + k*p + l, i*n*o*p + j*o*p + k*p + l, j*o*p + k*p + l) +\
' }\n'
' }\n'
' }\n'
'}'
)
c = ccode((a[i, j, k, l] + b[i, j, k, l])*c[j, k, l], assign_to=y[i])
assert c == s
def test_ccode_loops_multiple_terms():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o, p = symbols('n m o p', integer=True)
a = IndexedBase('a')
b = IndexedBase('b')
c = IndexedBase('c')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
k = Idx('k', o)
s0 = (
'for (int i=0; i<m; i++){\n'
' y[i] = 0;\n'
'}\n'
)
s1 = (
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' for (int k=0; k<o; k++){\n'
' y[i] = b[j]*b[k]*c[%s] + y[i];\n' % (i*n*o + j*o + k) +\
' }\n'
' }\n'
'}\n'
)
s2 = (
'for (int i=0; i<m; i++){\n'
' for (int k=0; k<o; k++){\n'
' y[i] = b[k]*a[%s] + y[i];\n' % (i*o + k) +\
' }\n'
'}\n'
)
s3 = (
'for (int i=0; i<m; i++){\n'
' for (int j=0; j<n; j++){\n'
' y[i] = b[j]*a[%s] + y[i];\n' % (i*n + j) +\
' }\n'
'}\n'
)
c = ccode(
b[j]*a[i, j] + b[k]*a[i, k] + b[j]*b[k]*c[i, j, k], assign_to=y[i])
assert (c == s0 + s1 + s2 + s3[:-1] or
c == s0 + s1 + s3 + s2[:-1] or
c == s0 + s2 + s1 + s3[:-1] or
c == s0 + s2 + s3 + s1[:-1] or
c == s0 + s3 + s1 + s2[:-1] or
c == s0 + s3 + s2 + s1[:-1])<|fim▁end|> |
def test_dummy_loops(): |
<|file_name|>Image.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2011 Henri Kerola
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vaadin.gwtgraphics.client;
/**
* Image represents a raster image that can be embedded into DrawingArea.
*
* @author Henri Kerola
*
*/
public class Image extends AbstractDrawing implements Sizeable, Positionable, Animatable {
/**
* Create a new Image with the given properties.
*
* @param x
* the x-coordinate position of the top-left corner of the image
* in pixels
* @param y
* the y-coordinate position of the top-left corner of the image
* in pixels
* @param width
* the width of the image in pixels
* @param height
* the height of the image in pixels
* @param href
* URL to an image to be shown.
*/
public Image(int x, int y, int width, int height, String href) {
setX(x);
setY(y);
setWidth(width);
setHeight(height);
setHref(href);
}
@Override
public Class<? extends Drawing> getType() {
return Image.class;
}
@Override
public int getX() {
return getImpl().getX(getElement());
}
@Override
public void setX(int x) {
getImpl().setX(getElement(), x, isAttached());
}
@Override
public int getY() {
return getImpl().getY(getElement());
}
@Override
public void setY(int y) {
getImpl().setY(getElement(), y, isAttached());
}
/**
* Returns the URL of the image currently shown.
*
* @return URL of the image
*/
public String getHref() {
return getImpl().getImageHref(getElement());
}
/**
* Sets the URL of the image to be shown.
*
* @param href
* URL of the image to be shown
*/
public void setHref(String href) {
getImpl().setImageHref(getElement(), href);
}
/**
* Returns the width of the Image in pixels.
*
* @return the width of the Image in pixels
*/
@Override
public int getWidth() {
return getImpl().getWidth(getElement());
}
/**
* Sets the width of the Image in pixels.
*
* @param width
* the new width in pixels
*/
@Override
public void setWidth(int width) {
getImpl().setWidth(getElement(), width);
}
@Override
public void setWidth(String width) {
boolean successful = false;
if (width != null && width.endsWith("px")) {
try {
setWidth(Integer.parseInt(width.substring(0, width.length() - 2)));
successful = true;
} catch (NumberFormatException e) {
}
}
if (!successful) {
throw new IllegalArgumentException("Only pixel units (px) are supported");
}
}
/**
* Returns the height of the Image in pixels.
*
* @return the height of the Image in pixels
*/
@Override
public int getHeight() {
return getImpl().getHeight(getElement());
}
/**
* Sets the height of the Image in pixels.
*
* @param height
* the new height in pixels
*/
@Override
public void setHeight(int height) {<|fim▁hole|> getImpl().setHeight(getElement(), height);
}
@Override
public void setHeight(String height) {
boolean successful = false;
if (height != null && height.endsWith("px")) {
try {
setHeight(Integer.parseInt(height.substring(0, height.length() - 2)));
successful = true;
} catch (NumberFormatException e) {
}
}
if (!successful) {
throw new IllegalArgumentException("Only pixel units (px) are supported");
}
}
@Override
public void setPropertyDouble(String property, double value) {
property = property.toLowerCase();
if ("x".equals(property)) {
setX((int) value);
} else if ("y".equals(property)) {
setY((int) value);
} else if ("width".equals(property)) {
setWidth((int) value);
} else if ("height".equals(property)) {
setHeight((int) value);
} else if ("rotation".equals(property)) {
setRotation((int) value);
}
}
}<|fim▁end|> | |
<|file_name|>onesky_test.go<|end_file_name|><|fim▁begin|>// Package onesky tests
// Copyright (c) 2015 Sebastian Czoch <[email protected]>. All rights reserved.
// Use of this source code is governed by a GNU v2 license found in the LICENSE file.
package onesky
import (
"fmt"
"io/ioutil"
"net/url"
"os"
"path"
"regexp"
"testing"
"github.com/jarcoal/httpmock"
"github.com/stretchr/testify/assert"
)
var testEndpoints = map[string]apiEndpoint{
"getFile": apiEndpoint{"projects/%d/translations", "GET"},
"postFile": apiEndpoint{"projects/%d/files", "POST"},
"deleteFile": apiEndpoint{"projects/%d/files", "DELETE"},
"listFiles": apiEndpoint{"projects/%d/files", "GET"},
"importTasks": apiEndpoint{"projects/%d/import-tasks", "GET"},
"importTask": apiEndpoint{"projects/%d/import-tasks/%d", "GET"},
"getTranslationsStatus": apiEndpoint{"projects/%d/translations/status", "GET"},
"getLanguages": apiEndpoint{"projects/%d/languages", "GET"},
}
// TestFull tests the full method on the apiEndpoint struct
func TestFull(t *testing.T) {
client := Client{
Secret: "test_secret",
APIKey: "test_apikey",
ProjectID: 1,
}
v := url.Values{}
v.Set("test_key", "test_val")
endpoint := testEndpoints["getFile"]
	want := "https://platform\\.api\\.onesky\\.io/1/projects/1/translations\\?api_key=test_apikey&dev_hash=[a-z0-9]+&test_key=test_val&timestamp=[0-9]+"
address, err := endpoint.full(&client, v)
if err != nil {
t.Errorf("full() = %+v, %+v, want %+v,nil", address, err, want)
}
found, _ := regexp.MatchString(want, address)
if !found {
t.Errorf("full() = %+v, %+v, want %+v,nil", address, err, want)
}
}
// TestGetEndpoint is testing getEndpoint function
func TestGetEndpoint(t *testing.T) {
endpointName := "not_exist_endpoint"
endpoint, err := getEndpoint(endpointName)
if err == nil {
t.Errorf("getEndpoint(%s) = %+v, %s, want %v,%s", endpointName, endpoint, err, nil, "endpoint not_exist_endpoint not found")
}
endpointName = "getFile"
endpoint, err = getEndpoint(endpointName)
if err != nil {
t.Errorf("getEndpoint(%s) = %+v, %s, want %v,%s", endpointName, endpoint, err, nil, "endpoint not_exist_endpoint not found")
}
}
func TestDeleteFileWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
err := client.DeleteFile("test.yml")
assert.Nil(t, err)
}
func TestDeleteFileWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
err := client.DeleteFile("test.yml")
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestListFilesWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
_, err := client.ListFiles(1, 1)
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestListFilesWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, `{"meta":{"status":200,"record_count":16},"data":[{"name":"strings.po","string_count":236,"last_import":{"id":123,"status":"in-progress"},"uploaded_at":"2013-10-07T15:27:10+0000","uploaded_at_timestamp":1381159630},{"name":"en.yml","string_count":335,"last_import":{"id":109,"status":"completed"},"uploaded_at":"2013-10-05T12:36:52+0000","uploaded_at_timestamp":1380976612},{"name":"Manuallyinput","string_count":285}]}`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
res, err := client.ListFiles(1, 1)
assert.Nil(t, err)
assert.Equal(t,
[]FileData{
FileData{
Name: "strings.po",
StringCount: 236,
LastImport: LastImport{
ID: 123,
Status: "in-progress",
},
UpoladedAt: "2013-10-07T15:27:10+0000",
UpoladedAtTimestamp: 1381159630,
},
FileData{
Name: "en.yml",
StringCount: 335,
LastImport: LastImport{
ID: 109,
Status: "completed",
},
UpoladedAt: "2013-10-05T12:36:52+0000",
UpoladedAtTimestamp: 1380976612,
},
FileData{
Name: "Manuallyinput",
StringCount: 285,
LastImport: LastImport{
ID: 0,
Status: "",
},
UpoladedAt: "",
UpoladedAtTimestamp: 0,
},
}, res)
}
func TestDownloadFileWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
_, err := client.DownloadFile("test.yml", "en_US")
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestDownloadFileWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, `test: translatedTest`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
res, err := client.DownloadFile("test.yml", "en_US")
fmt.Println(res)
assert.Nil(t, err)
assert.Equal(t, `test: translatedTest`, res)
}
func TestUploadFileWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(201, `{"meta":{"status":201},"data":{"name":"string.po","format":"GNU_PO","language":{"code":"en-US","english_name":"English (United States)","local_name":"English (United States)","locale":"en","region":"US"},"import":{"id":154,"created_at":"2013-10-07T15:27:10+0000","created_at_timestamp":1381159630}}}`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
tmpdir, err := ioutil.TempDir("", "")
assert.Nil(t, err)
defer os.RemoveAll(tmpdir)
filename := path.Join(tmpdir, "string.po")
ioutil.WriteFile(filename, []byte("test"), 0666)
res, err := client.UploadFile(filename, "GNU_PO", "en_US", true)
assert.Nil(t, err)
assert.Equal(t, UploadData{
Name: "string.po",
Format: "GNU_PO",
Language: Language{
Code: "en-US",
EnglishName: "English (United States)",
LocalName: "English (United States)",
Locale: "en",
Region: "US",
},
Import: TaskData{
ID: 154,
OriginalID: 154.0,
CreateddAt: "2013-10-07T15:27:10+0000",
CreateddAtTimestamp: 1381159630,
},
}, res)
}
func TestUploadFileWithFailure(t *testing.T) {
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
tmpdir, err := ioutil.TempDir("", "")
assert.Nil(t, err)
defer os.RemoveAll(tmpdir)
filename := path.Join(tmpdir, "not_found")
_, err = client.UploadFile(filename, "GNU_PO", "en_US", true)
assert.NotNil(t, err)
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
ioutil.WriteFile(filename, []byte("test"), 0666)
_, err = client.UploadFile(filename, "GNU_PO", "en_US", true)
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestImportTasksWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
_, err := client.ImportTasks(map[string]interface{}{"page": 1, "per_page": 50, "status": "all"})
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestImportTasksWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, `{"meta":{"status":200},"data":[{"id":"177","file":{"name":"string2.po"},"status":"in-progress","created_at":"2013-10-07T15:25:00+0000","created_at_timestamp":1381159500},{"id":"176","file":{"name":"string.po"},"status":"in-progress","created_at":"2013-10-07T15:27:10+0000","created_at_timestamp":1381159630}]}`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
res, err := client.ImportTasks(map[string]interface{}{"page": 1, "per_page": 50, "status": "in-progress"})
assert.Nil(t, err)
assert.Equal(t,
[]TaskData{
TaskData{
ID: 177,
OriginalID: "177",
File: TaskFile{
Name: "string2.po",
},
Status: "in-progress",
CreateddAt: "2013-10-07T15:25:00+0000",
CreateddAtTimestamp: 1381159500,
},
TaskData{
ID: 176,
OriginalID: "176",
File: TaskFile{
Name: "string.po",
},
Status: "in-progress",
CreateddAt: "2013-10-07T15:27:10+0000",
CreateddAtTimestamp: 1381159630,
},
}, res)
}
func TestImportTaskWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
_, err := client.ImportTask(1)
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestImportTaskWithSuccess(t *testing.T) {
httpmock.Activate()
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, `{"meta":{"status":200},"data":{"id":176,"file":{"name":"string.po","format":"GNU_PO","locale":{"code":"en-US","english_name":"English (United States)","local_name":"English (United States)","locale":"en","region":"US"}},"string_count":236,"word_count":1260,"status":"in-progress","created_at":"2013-10-07T15:27:10+0000","created_at_timestamp":1381159630}}`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
res, err := client.ImportTask(1)
assert.Nil(t, err)
assert.Equal(t,
TaskData{
ID: 176,
OriginalID: float64(176),
File: TaskFile{
Name: "string.po",
Format: "GNU_PO",
Locale: Language{
Code: "en-US",
EnglishName: "English (United States)",
LocalName: "English (United States)",
Locale: "en",
Region: "US",
},
},
StringCount: 236,
WordCount: 1260,
Status: "in-progress",
CreateddAt: "2013-10-07T15:27:10+0000",
CreateddAtTimestamp: 1381159630,
}, res)
}
func TestGetTranslationsStatusWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
_, err := client.GetTranslationsStatus("string.po", "ja-JP")
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestGetTranslationsStatusWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, `{"meta":{"status":200},"data":{"file_name":"string.po","locale":{"code":"ja-JP","english_name":"Japanese","local_name":"\u65e5\u672c\u8a9e","locale":"ja","region":"JP"},"progress":"92%","string_count":1359,"word_count":3956}}`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
res, err := client.GetTranslationsStatus("string.po", "ja-JP")
assert.Nil(t, err)
assert.Equal(t,
TranslationsStatus{
FileName: "string.po",
Locale: Language{
Code: "ja-JP",
EnglishName: "Japanese",
LocalName: "日本語",
CustomLocale: "",
Locale: "ja",
Region: "JP",
},
Progress: "92%",
StringCount: 1359,
WordCount: 3956,
}, res)
}
func TestGetLanguagesWithFailure(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(500, ""))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
_, err := client.GetLanguages()
assert.Equal(t, err, fmt.Errorf("bad status: %d", 500))
}
func TestGetLanguagesWithSuccess(t *testing.T) {
httpmock.Activate()
defer httpmock.DeactivateAndReset()
httpmock.RegisterNoResponder(httpmock.NewStringResponder(200, `{"meta":{"status":200,"record_count":17},"data":[{"code":"it","english_name":"Italian","local_name":"Italiano\u0000","custom_locale":"","Locale":"it","region":"","translation_progress":"0.0"},{"code":"de","english_name":"German","local_name":"Deutsch\u0000","custom_locale":"","locale":"de","region":"","translation_progress":"0.0"},{"code":"fr","english_name":"French","local_name":"Français\u0000","custom_locale":"","locale":"fr","region":"","translation_progress":"0.0"}]}`))
client := Client{APIKey: "abcdef", Secret: "abcdef", ProjectID: 1}
res, err := client.GetLanguages()
assert.Nil(t, err)
assert.Equal(t,
[]Language{
Language{
Code: "it",
EnglishName: "Italian",
LocalName: "Italiano\u0000",
CustomLocale: "",
Locale: "it",
Region: "",
TranslationProgress: "0.0",
},<|fim▁hole|> LocalName: "Deutsch\u0000",
CustomLocale: "",
Locale: "de",
Region: "",
TranslationProgress: "0.0",
},
Language{
Code: "fr",
EnglishName: "French",
LocalName: "Français\u0000",
CustomLocale: "",
Locale: "fr",
Region: "",
TranslationProgress: "0.0",
},
}, res)
}<|fim▁end|> | Language{
Code: "de",
EnglishName: "German", |
<|file_name|>test_meta_data_provider.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
import base64
import unittest
from ingenico.connect.sdk.defaultimpl.default_marshaller import DefaultMarshaller
from ingenico.connect.sdk.domain.metadata.shopping_cart_extension import ShoppingCartExtension
from ingenico.connect.sdk.meta_data_provider import MetaDataProvider
from ingenico.connect.sdk.request_header import RequestHeader
class MetaDataProviderTest(unittest.TestCase):
"""Contains tests to check that the meta data provider correctly stores allowed request headers
and refuses prohibited headers
"""
def test_server_meta_data_headers_full(self):
"""Tests that the MetaDataProvider can construct meta_data_headers when supplied with a full shopping cart"""
shopping_cart_extension = ShoppingCartExtension("Ingenico.creator", "Extension", "1.0", "ExtensionId")
meta_data_provider = MetaDataProvider("Ingenico", shopping_cart_extension)
request_headers = meta_data_provider.meta_data_headers
self.assertEqual(1, len(request_headers))
self.assertServerMetaInfo(meta_data_provider, "Ingenico", shopping_cart_extension, request_headers[0])
def test_server_meta_data_headers_full_no_shopping_cart_extension_id(self):
"""Tests that the MetaDataProvider can construct meta_data_headers when supplied with a full shopping cart"""
shopping_cart_extension = ShoppingCartExtension("Ingenico.creator", "Extension", "1.0")
meta_data_provider = MetaDataProvider("Ingenico", shopping_cart_extension)
request_headers = meta_data_provider.meta_data_headers
self.assertEqual(1, len(request_headers))
self.assertServerMetaInfo(meta_data_provider, "Ingenico", shopping_cart_extension, request_headers[0])
def test_get_server_metadata_headers_no_additional_headers(self):
"""Tests that the MetaDataProvider functions correctly without any additional headers as arguments"""
meta_data_provider = MetaDataProvider("Ingenico")
request_headers = meta_data_provider.meta_data_headers
self.assertEqual(1, len(request_headers))
self.assertServerMetaInfo(meta_data_provider, "Ingenico", None, request_headers[0])
def test_get_server_metadata_headers_additional_headers(self):
"""Tests that the MetaDataProvider can handle multiple additional headers"""
additional_headers = [RequestHeader("Header1", "&=$%"), RequestHeader("Header2", "blah blah"),
RequestHeader("Header3", "foo")]
meta_data_provider = MetaDataProvider("Ingenico", None, additional_headers)
request_headers = meta_data_provider.meta_data_headers
self.assertEqual(4, len(request_headers))
for index in range(1, 4):
self.assertEqual(additional_headers[index-1].name, request_headers[index].name)
self.assertEqual(additional_headers[index-1].value, request_headers[index].value)
def test_constructor_with_prohibited_headers(self):
"""Tests that the MetaDataProvider constructor does not accept any headers marked as prohibited"""
for name in MetaDataProvider.prohibited_headers:
additional_headers = [RequestHeader("Header1", "Value1"),
RequestHeader(name, "should be slashed and burnt"),
RequestHeader("Header3", "Value3")]
with self.assertRaises(Exception) as error:
MetaDataProvider("Ingenico", None, additional_headers)
self.assertIn(name, str(error.exception))<|fim▁hole|>
def assertServerMetaInfo(self, meta_data_provider, integrator, shopping_cart_extension=None, request_header=None):
"""Assert that checks that the request_header is the default header "X-GCS-ServerMetaInfo",
that the server_meta_data_info of the meta_data_provider is correct
and that the shopping cart extension is consistent with the extension stored in meta_data_provider
"""
self.assertEqual("X-GCS-ServerMetaInfo", request_header.name)
self.assertIsNotNone(request_header.value)
# server_meta_info is stored in json format and encoded using utf-8 and base64 encoding, decode it
server_meta_info_json = base64.b64decode(request_header.value).decode('utf-8')
server_meta_info = DefaultMarshaller.INSTANCE().unmarshal(server_meta_info_json, MetaDataProvider.ServerMetaInfo)
self.assertEqual(meta_data_provider._platform_identifier, server_meta_info.platform_identifier)
self.assertEqual(meta_data_provider._sdk_identifier, server_meta_info.sdk_identifier)
self.assertEqual("Ingenico", server_meta_info.sdk_creator)
self.assertEqual(integrator, server_meta_info.integrator)
if shopping_cart_extension is None:
self.assertIsNone(server_meta_info.shopping_cart_extension)
else:
self.assertEqual(shopping_cart_extension.creator, server_meta_info.shopping_cart_extension.creator)
self.assertEqual(shopping_cart_extension.name, server_meta_info.shopping_cart_extension.name)
self.assertEqual(shopping_cart_extension.version, server_meta_info.shopping_cart_extension.version)
self.assertEqual(shopping_cart_extension.extension_id, server_meta_info.shopping_cart_extension.extension_id)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>check-quadrant.py<|end_file_name|><|fim▁begin|># Brandon Michael
# cis142
# checkForQuadrant.py
# Goal: This program will keep asking for input values to check for the quadrant postion,
# origin, x-axis and y axis postions
# Notes: I used a while loop to make testing values easier and I used the input x,y
# Display program instructions
print("###################################################")
print("Quadrant Finder 1.0")
print("Enter the x and y coordinates to find the quadrant!")
print("Type [exit] to quit the program")
print("###################################################")
# Setup the x and y variables
xValue = None
yValue = None
# Setup a loop that breaks when you type exit
while True:
# Get the input values in a X,Y format
inputCoordinates = input("Type in coordinates [x,y]: ")
# Check if exit was typed, if so then exit the loop and end
if inputCoordinates == "exit":
break # stops the loop
# We want to make sure we can only strip out 2 input values
# and make sure there is a comma separating them
elif len(inputCoordinates.strip().split(',')) == 2 and inputCoordinates.count(',') == 1:
        # Reset the coordinates so values from a previous iteration are not reused
        xValue = None
        yValue = None
        # Loop over the two numbers that are stripped out by the comma value
for coordinate in inputCoordinates.strip().split(','):
# This checks to see if we have set a value for x
# If it is still set to None then the first value is going to be xValue
if xValue is None:
xValue = int(coordinate)
# Since we are checking the xValue we can assume when the loop comes back
# a second time we can set it to yValue
else:
yValue = int(coordinate)
# If its a 0,0 value then its the Origin
if xValue == 0 and yValue == 0:
print("Origin")
else:
# If x = 0 and the y is greater or less than 0 its on the Y axis
if xValue == 0 and (yValue < 0 or yValue > 0):
print("Y - Axis")
# If x is greater or less than 0 and y = 0 its on the X axis
elif (xValue < 0 or xValue > 0) and yValue == 0:
print("X - Axis")
# Anything else and we need to check for quadrants
else:
# If x is a positive number and y is a negative positive its in Quadrant 1
if xValue > 0 and yValue > 0:
print("Quadrant I")
# If x is a negative number and y is a positive number then its in Quadrant 2
elif xValue < 0 and yValue > 0:
print("Quadrant II")
# If x is a negative number and y is negative number then its in Quadrant 3
elif xValue < 0 and yValue < 0:
print("Quadrant III")
# If x is a positive number and y is a negative number then its in Quadrant 4<|fim▁hole|> # If they typed anything but 2 numbers separated by a comma then ask for the input again
else:
print("Please type the input value as x,y")
print("Example: 1,-9")<|fim▁end|> | elif xValue > 0 and yValue < 0:
print("Quadrant IV") |
<|file_name|>CaptureFactoryImpl.java<|end_file_name|><|fim▁begin|>package com.intershop.adapter.payment.partnerpay.internal.service.capture;
import javax.inject.Inject;
import com.google.inject.Injector;
import com.intershop.adapter.payment.partnerpay.capi.service.capture.CaptureFactory;
import com.intershop.api.service.payment.v1.capability.Capture;
public class CaptureFactoryImpl implements CaptureFactory
{
@Inject
private Injector injector;
@Override
public Capture createCapture()
{
Capture ret = new CaptureImpl();
<|fim▁hole|>
return ret;
}
}<|fim▁end|> | injector.injectMembers(ret); |
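// The one-line completion above uses com.google.inject.Injector.injectMembers,
// which populates the @Inject-annotated members of an object that Guice did not
// itself construct. A minimal self-contained sketch of that idiom (all class
// and field names here are hypothetical, not from the Intershop API):
//
//   import com.google.inject.Guice;
//   import com.google.inject.Injector;
//
//   class Greeter { String greet() { return "hello"; } }
//   class Widget { @javax.inject.Inject Greeter greeter; }
//
//   Injector injector = Guice.createInjector();
//   Widget widget = new Widget();   // built by hand, greeter is still null
//   injector.injectMembers(widget); // Guice fills in the @Inject members
//   System.out.println(widget.greeter.greet()); // prints "hello"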
<|file_name|>WikipediaArticleReader.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2011 Diego Ceccarelli
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.cnr.isti.hpc.wikipedia.reader;
import info.bliki.wiki.dump.IArticleFilter;
import info.bliki.wiki.dump.Siteinfo;
import info.bliki.wiki.dump.WikiArticle;
import info.bliki.wiki.dump.WikiXMLParser;
import it.cnr.isti.hpc.benchmark.Stopwatch;
import it.cnr.isti.hpc.io.IOUtils;
import it.cnr.isti.hpc.log.ProgressLogger;
import it.cnr.isti.hpc.wikipedia.article.Article;
import it.cnr.isti.hpc.wikipedia.article.Article.Type;
import it.cnr.isti.hpc.wikipedia.parser.ArticleParser;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
/**
* A reader that converts a Wikipedia XML dump into a JSON dump. The JSON dump
* contains all the articles of the XML dump, one article per line. Each line
* is the JSON serialization of an Article object.
*
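* A minimal usage sketch, based on the constructor and start() below (the
* file names here are hypothetical):
*
* <pre>
* WikipediaArticleReader reader = new WikipediaArticleReader(
*         "enwiki-pages-articles.xml.bz2", "enwiki.json.gz", "en");
* reader.start();
* </pre>
*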
* @see Article
*
* @author Diego Ceccarelli, [email protected] created on 18/nov/2011
*/
public class WikipediaArticleReader {
/**
* Logger for this class
*/
private static final Logger logger = LoggerFactory
.getLogger(WikipediaArticleReader.class);<|fim▁hole|> private BufferedWriter out;
private ArticleParser parser;
// private JsonRecordParser<Article> encoder;
private static ProgressLogger pl = new ProgressLogger("parsed {} articles",
10000);
private static Stopwatch sw = new Stopwatch();
/**
* Generates a converter from the xml to json dump.
*
* @param inputFile
* - the xml file (compressed)
* @param outputFile
* - the json output file, containing one article per line (if
* the filename ends with <tt>.gz </tt> the output will be
* compressed).
*
* @param lang
* - the language of the dump
*
*
*/
public WikipediaArticleReader(String inputFile, String outputFile,
String lang) {
this(new File(inputFile), new File(outputFile), lang);
}
/**
* Generates a converter from the xml to json dump.
*
* @param inputFile
* - the xml file (compressed)
* @param outputFile
* - the json output file, containing one article per line (if
* the filename ends with <tt>.gz </tt> the output will be
* compressed).
*
* @param lang
* - the language of the dump
*
*
*/
public WikipediaArticleReader(File inputFile, File outputFile, String lang) {
JsonConverter handler = new JsonConverter();
// encoder = new JsonRecordParser<Article>(Article.class);
parser = new ArticleParser(lang);
try {
wxp = new WikiXMLParser(inputFile.getAbsolutePath(), handler);
} catch (Exception e) {
logger.error("creating the parser {}", e.toString());
System.exit(-1);
}
out = IOUtils.getPlainOrCompressedUTF8Writer(outputFile
.getAbsolutePath());
}
/**
* Starts the parsing
*/
public void start() throws IOException, SAXException {
wxp.parse();
out.close();
logger.info(sw.stat("articles"));
}
private class JsonConverter implements IArticleFilter {
public void process(WikiArticle page, Siteinfo si) {
pl.up();
sw.start("articles");
String title = page.getTitle();
String id = page.getId();
String namespace = page.getNamespace();
Integer integerNamespace = page.getIntegerNamespace();
String timestamp = page.getTimeStamp();
Type type = Type.UNKNOWN;
if (page.isCategory())
type = Type.CATEGORY;
if (page.isTemplate()) {
type = Type.TEMPLATE;
// FIXME just to go fast;
sw.stop("articles");
return;
}
if (page.isProject()) {
type = Type.PROJECT;
// FIXME just to go fast;
sw.stop("articles");
return;
}
if (page.isFile()) {
type = Type.FILE;
// FIXME just to go fast;
sw.stop("articles");
return;
}
if (page.isMain())
type = Type.ARTICLE;
Article article = new Article();
article.setTitle(title);
article.setWikiId(Integer.parseInt(id));
article.setNamespace(namespace);
article.setIntegerNamespace(integerNamespace);
article.setTimestamp(timestamp);
article.setType(type);
parser.parse(article, page.getText());
try {
out.write(article.toJson());
out.write("\n");
} catch (IOException e) {
logger.error("writing the output file {}", e.toString());
System.exit(-1);
}
sw.stop("articles");
return;
}
}
}<|fim▁end|> |
private WikiXMLParser wxp; |
<|file_name|>graph.py<|end_file_name|><|fim▁begin|>"""Base class for undirected graphs.
The Graph class allows any hashable object as a node
and can associate key/value attribute pairs with each undirected edge.
Self-loops are allowed but multiple edges are not (see MultiGraph).
For directed graphs see DiGraph and MultiDiGraph.
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
from copy import deepcopy
import networkx as nx
from networkx.exception import NetworkXError
import networkx.convert as convert
__author__ = """\n""".join(['Aric Hagberg ([email protected])',
'Pieter Swart ([email protected])',
'Dan Schult([email protected])'])
class Graph(object):
"""
Base class for undirected graphs.
A Graph stores nodes and edges with optional data, or attributes.
Graphs hold undirected edges. Self loops are allowed but multiple
(parallel) edges are not.
Nodes can be arbitrary (hashable) Python objects with optional
key/value attributes.
Edges are represented as links between nodes with optional
key/value attributes.
Parameters
----------
data : input graph
Data to initialize graph. If data=None (default) an empty
graph is created. The data can be an edge list, or any
NetworkX graph object. If the corresponding optional Python
packages are installed the data can also be a NumPy matrix
or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
DiGraph
MultiGraph
MultiDiGraph
Examples
--------
Create an empty graph structure (a "null graph") with no nodes and
no edges.
>>> G = nx.Graph()
G can be grown in several ways.
**Nodes:**
Add one node at a time:
>>> G.add_node(1)
Add the nodes from any container (a list, dict, set or
even the lines from a file or the nodes from another graph).
>>> G.add_nodes_from([2,3])
>>> G.add_nodes_from(range(100,110))
>>> H=nx.Graph()
>>> H.add_path([0,1,2,3,4,5,6,7,8,9])
>>> G.add_nodes_from(H)
In addition to strings and integers any hashable Python object
(except None) can represent a node, e.g. a customized node object,
or even another Graph.
>>> G.add_node(H)
**Edges:**
G can also be grown by adding edges.
Add one edge,
>>> G.add_edge(1, 2)
a list of edges,
>>> G.add_edges_from([(1,2),(1,3)])
or a collection of edges,
>>> G.add_edges_from(H.edges())
If some edges connect nodes not yet in the graph, the nodes
are added automatically. There are no errors when adding
nodes or edges that already exist.
**Attributes:**
Each graph, node, and edge can hold key/value attribute pairs
in an associated attribute dictionary (the keys must be hashable).
By default these are empty, but can be added or changed using
add_edge, add_node or direct manipulation of the attribute
dictionaries named graph, node and edge respectively.
>>> G = nx.Graph(day="Friday")
>>> G.graph
{'day': 'Friday'}
Add node attributes using add_node(), add_nodes_from() or G.node
>>> G.add_node(1, time='5pm')
>>> G.add_nodes_from([3], time='2pm')
>>> G.node[1]
{'time': '5pm'}
>>> G.node[1]['room'] = 714
>>> del G.node[1]['room'] # remove attribute
>>> list(G.nodes(data=True))
[(1, {'time': '5pm'}), (3, {'time': '2pm'})]
Warning: adding a node to G.node does not add it to the graph.
Add edge attributes using add_edge(), add_edges_from(), subscript
notation, or G.edge.
>>> G.add_edge(1, 2, weight=4.7 )
>>> G.add_edges_from([(3,4),(4,5)], color='red')
>>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})])
>>> G[1][2]['weight'] = 4.7
>>> G.edge[1][2]['weight'] = 4
**Shortcuts:**
Many common graph features allow python syntax to speed reporting.
>>> 1 in G # check if node in graph
True
>>> [n for n in G if n<3] # iterate through nodes
[1, 2]
>>> len(G) # number of nodes in graph
5
The fastest way to traverse all edges of a graph is via
adjacency(), but the edges() method is often more convenient.
>>> for n,nbrsdict in G.adjacency():
... for nbr,eattr in nbrsdict.items():
... if 'weight' in eattr:
... (n,nbr,eattr['weight'])
(1, 2, 4)
(2, 1, 4)
(2, 3, 8)
(3, 2, 8)
>>> list(G.edges(data='weight'))
[(1, 2, 4), (2, 3, 8), (3, 4, None), (4, 5, None)]
**Reporting:**
Simple graph information is obtained using methods.
Reporting methods usually return iterators instead of containers
to reduce memory usage.
Methods exist for reporting nodes(), edges(), neighbors() and degree()
as well as the number of nodes and edges.
For details on these and other miscellaneous methods, see below.
**Subclasses (Advanced):**
The Graph class uses a dict-of-dict-of-dict data structure.
The outer dict (node_dict) holds adjacency lists keyed by node.
The next dict (adjlist) represents the adjacency list and holds
edge data keyed by neighbor. The inner dict (edge_attr) represents
the edge data and holds edge attribute values keyed by attribute names.
Each of these three dicts can be replaced by a user defined
dict-like object. In general, the dict-like features should be
maintained but extra features can be added. To replace one of the
dicts create a new graph class by changing the class(!) variable
holding the factory for that dict-like structure. The variable names
are node_dict_factory, adjlist_dict_factory and edge_attr_dict_factory.
node_dict_factory : function, (default: dict)
Factory function to be used to create the outer-most dict
in the data structure that holds adjacency lists keyed by node.
It should require no arguments and return a dict-like object.
adjlist_dict_factory : function, (default: dict)
Factory function to be used to create the adjacency list
dict which holds edge data keyed by neighbor.
It should require no arguments and return a dict-like object
edge_attr_dict_factory : function, (default: dict)
Factory function to be used to create the edge attribute
dict which holds attribute values keyed by attribute name.
It should require no arguments and return a dict-like object.
Examples
--------
Create a graph object that tracks the order nodes are added.
>>> from collections import OrderedDict
>>> class OrderedNodeGraph(nx.Graph):
... node_dict_factory=OrderedDict
>>> G=OrderedNodeGraph()
>>> G.add_nodes_from( (2,1) )
>>> list(G.nodes())
[2, 1]
>>> G.add_edges_from( ((2,2), (2,1), (1,1)) )
>>> list(G.edges())
[(2, 1), (2, 2), (1, 1)]
Create a graph object that tracks the order nodes are added
and for each node track the order that neighbors are added.
>>> class OrderedGraph(nx.Graph):
... node_dict_factory = OrderedDict
... adjlist_dict_factory = OrderedDict
>>> G = OrderedGraph()
>>> G.add_nodes_from( (2,1) )
>>> list(G.nodes())
[2, 1]
>>> G.add_edges_from( ((2,2), (2,1), (1,1)) )
>>> list(G.edges())
[(2, 2), (2, 1), (1, 1)]
Create a low memory graph class that effectively disallows edge
attributes by using a single attribute dict for all edges.
This reduces the memory used, but you lose edge attributes.
>>> class ThinGraph(nx.Graph):
... all_edge_dict = {'weight': 1}
... def single_edge_dict(self):
... return self.all_edge_dict
... edge_attr_dict_factory = single_edge_dict
>>> G = ThinGraph()
>>> G.add_edge(2,1)
>>> list(G.edges(data= True))
[(1, 2, {'weight': 1})]
>>> G.add_edge(2,2)
>>> G[2][1] is G[2][2]
True
"""
node_dict_factory = dict
adjlist_dict_factory = dict
edge_attr_dict_factory = dict
def __init__(self, data=None, **attr):
"""Initialize a graph with edges, name, graph attributes.
Parameters
----------
data : input graph
Data to initialize graph. If data=None (default) an empty
graph is created. The data can be an edge list, or any
NetworkX graph object. If the corresponding optional Python
packages are installed the data can also be a NumPy matrix
or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
name : string, optional (default='')
An optional name for the graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
convert
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G = nx.Graph(name='my graph')
>>> e = [(1,2),(2,3),(3,4)] # list of edges
>>> G = nx.Graph(e)
Arbitrary graph attribute pairs (key=value) may be assigned
>>> G=nx.Graph(e, day="Friday")
>>> G.graph
{'day': 'Friday'}
"""
self.node_dict_factory = ndf = self.node_dict_factory
self.adjlist_dict_factory = self.adjlist_dict_factory
self.edge_attr_dict_factory = self.edge_attr_dict_factory
self.graph = {} # dictionary for graph attributes
self.node = ndf() # empty node attribute dict
self.adj = ndf() # empty adjacency dict
# attempt to load graph with data
if data is not None:
convert.to_networkx_graph(data, create_using=self)
# load graph attributes (must be after convert)
self.graph.update(attr)
self.edge = self.adj
@property
def name(self):
return self.graph.get('name', '')
@name.setter
def name(self, s):
self.graph['name'] = s
def __str__(self):
"""Return the graph name.
Returns
-------
name : string
The name of the graph.
Examples
--------
>>> G = nx.Graph(name='foo')
>>> str(G)
'foo'
"""
return self.name
def __iter__(self):
"""Iterate over the nodes. Use the expression 'for n in G'.
Returns
-------
niter : iterator
An iterator over all nodes in the graph.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
"""
return iter(self.node)
def __contains__(self, n):
"""Return True if n is a node, False otherwise. Use the expression
'n in G'.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> 1 in G
True
"""
try:
return n in self.node
except TypeError:
return False
def __len__(self):
"""Return the number of nodes. Use the expression 'len(G)'.
Returns
-------
nnodes : int
The number of nodes in the graph.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> len(G)
4
"""
return len(self.node)
def __getitem__(self, n):
"""Return a dict of neighbors of node n. Use the expression 'G[n]'.
Parameters
----------
n : node
A node in the graph.
Returns
-------
adj_dict : dictionary
The adjacency dictionary for nodes connected to n.
Notes
-----
G[n] is similar to G.neighbors(n) but the internal data dictionary
is returned instead of an iterator.
Assigning G[n] will corrupt the internal graph data structure.
Use G[n] for reading data only.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])<|fim▁hole|> >>> G[0]
{1: {}}
"""
return self.adj[n]
def add_node(self, n, attr_dict=None, **attr):
"""Add a single node n and update node attributes.
Parameters
----------
n : node
A node can be any hashable Python object except None.
attr_dict : dictionary, optional (default= no attributes)
Dictionary of node attributes. Key/value pairs will
update existing data associated with the node.
attr : keyword arguments, optional
Set or change attributes using key=value.
See Also
--------
add_nodes_from
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_node(1)
>>> G.add_node('Hello')
>>> K3 = nx.Graph([(0,1),(1,2),(2,0)])
>>> G.add_node(K3)
>>> G.number_of_nodes()
3
Use keywords set/change node attributes:
>>> G.add_node(1,size=10)
>>> G.add_node(3,weight=0.4,UTM=('13S',382871,3972649))
Notes
-----
A hashable object is one that can be used as a key in a Python
dictionary. This includes strings, numbers, tuples of strings
and numbers, etc.
On many platforms hashable items also include mutables such as
NetworkX Graphs, though one should be careful that the hash
doesn't change on mutables.
"""
# set up attribute dict
if attr_dict is None:
attr_dict = attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(
"The attr_dict argument must be a dictionary.")
if n not in self.node:
self.adj[n] = self.adjlist_dict_factory()
self.node[n] = attr_dict
else: # update attr even if node already exists
self.node[n].update(attr_dict)
def add_nodes_from(self, nodes, **attr):
"""Add multiple nodes.
Parameters
----------
nodes : iterable container
A container of nodes (list, dict, set, etc.).
OR
A container of (node, attribute dict) tuples.
Node attributes are updated using the attribute dict.
attr : keyword arguments, optional (default= no attributes)
Update attributes for all nodes in nodes.
Node attributes specified in nodes as a tuple
take precedence over attributes specified generally.
See Also
--------
add_node
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_nodes_from('Hello')
>>> K3 = nx.Graph([(0,1),(1,2),(2,0)])
>>> G.add_nodes_from(K3)
>>> sorted(G.nodes(),key=str)
[0, 1, 2, 'H', 'e', 'l', 'o']
Use keywords to update specific node attributes for every node.
>>> G.add_nodes_from([1,2], size=10)
>>> G.add_nodes_from([3,4], weight=0.4)
Use (node, attrdict) tuples to update attributes for specific
nodes.
>>> G.add_nodes_from([(1,dict(size=11)), (2,{'color':'blue'})])
>>> G.node[1]['size']
11
>>> H = nx.Graph()
>>> H.add_nodes_from(G.nodes(data=True))
>>> H.node[1]['size']
11
"""
for n in nodes:
# keep all this inside try/except because
# CPython throws TypeError on n not in self.node,
# while pre-2.7.5 ironpython throws on self.adj[n]
try:
if n not in self.node:
self.adj[n] = self.adjlist_dict_factory()
self.node[n] = attr.copy()
else:
self.node[n].update(attr)
except TypeError:
nn, ndict = n
if nn not in self.node:
self.adj[nn] = self.adjlist_dict_factory()
newdict = attr.copy()
newdict.update(ndict)
self.node[nn] = newdict
else:
olddict = self.node[nn]
olddict.update(attr)
olddict.update(ndict)
def remove_node(self, n):
"""Remove node n.
Removes the node n and all adjacent edges.
Attempting to remove a non-existent node will raise an exception.
Parameters
----------
n : node
A node in the graph
Raises
-------
NetworkXError
If n is not in the graph.
See Also
--------
remove_nodes_from
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> list(G.edges())
[(0, 1), (1, 2)]
>>> G.remove_node(1)
>>> list(G.edges())
[]
"""
adj = self.adj
try:
nbrs = list(adj[n].keys()) # keys handles self-loops (allow mutation later)
del self.node[n]
except KeyError: # NetworkXError if n not in self
raise NetworkXError("The node %s is not in the graph." % (n,))
for u in nbrs:
del adj[u][n] # remove all edges n-u in graph
del adj[n] # now remove node
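# A note on remove_node() above: the list(...) copy of the neighbor keys is
# deliberate, because the loop deletes entries from the very dicts being
# traversed, and deleting from a dict while iterating over it directly would
# raise a RuntimeError on Python 3.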
def remove_nodes_from(self, nodes):
"""Remove multiple nodes.
Parameters
----------
nodes : iterable container
A container of nodes (list, dict, set, etc.). If a node
in the container is not in the graph it is silently
ignored.
See Also
--------
remove_node
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> e = list(G.nodes())
>>> e
[0, 1, 2]
>>> G.remove_nodes_from(e)
>>> list(G.nodes())
[]
"""
adj = self.adj
for n in nodes:
try:
del self.node[n]
for u in list(adj[n].keys()): # keys() handles self-loops
del adj[u][n] # (allows mutation of dict in loop)
del adj[n]
except KeyError:
pass
def nodes(self, data=False):
"""Returns an iterator over the nodes.
Parameters
----------
data : boolean, optional (default=False)
If ``False``, the iterator returns nodes. If ``True``,
the iterator returns a two-tuple of node and node data
dictionary.
Returns
-------
iterator
An iterator over nodes, or if ``data`` is ``True``, an
iterator over two-tuples of the form ``(node, node data
dictionary)``.
Notes
-----
If the node data is not required, it is simpler and equivalent
to use the expression ``for n in G``, or ``list(G)``.
Examples
--------
There are two simple ways of getting a list of all nodes in the graph::
>>> G = nx.Graph()
>>> G.add_nodes_from(range(3))
>>> list(G.nodes())
[0, 1, 2]
>>> list(G)
[0, 1, 2]
To get the node data along with the nodes::
>>> G.add_node(1, time='5pm')
>>> G.node[0]['foo'] = 'bar'
>>> list(G.nodes(data=True))
[(0, {'foo': 'bar'}), (1, {'time': '5pm'}), (2, {})]
"""
if data:
return iter(self.node.items())
return iter(self.node)
def number_of_nodes(self):
"""Return the number of nodes in the graph.
Returns
-------
nnodes : int
The number of nodes in the graph.
See Also
--------
order, __len__ which are identical
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> len(G)
3
"""
return len(self.node)
def order(self):
"""Return the number of nodes in the graph.
Returns
-------
nnodes : int
The number of nodes in the graph.
See Also
--------
number_of_nodes, __len__ which are identical
"""
return len(self.node)
def has_node(self, n):
"""Return True if the graph contains the node n.
Parameters
----------
n : node
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2])
>>> G.has_node(0)
True
It is more readable and simpler to use
>>> 0 in G
True
"""
try:
return n in self.node
except TypeError:
return False
def add_edge(self, u, v, attr_dict=None, **attr):
"""Add an edge between u and v.
The nodes u and v will be automatically added if they are
not already in the graph.
Edge attributes can be specified with keywords or by providing
a dictionary with key/value pairs. See examples below.
Parameters
----------
u,v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
attr_dict : dictionary, optional (default= no attributes)
Dictionary of edge attributes. Key/value pairs will
update existing data associated with the edge.
attr : keyword arguments, optional
Edge data (or labels or objects) can be assigned using
keyword arguments.
See Also
--------
add_edges_from : add a collection of edges
Notes
-----
Adding an edge that already exists updates the edge data.
Many NetworkX algorithms designed for weighted graphs use as
the edge weight a numerical value assigned to a keyword
which by default is 'weight'.
Examples
--------
The following all add the edge e=(1,2) to graph G:
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> e = (1,2)
>>> G.add_edge(1, 2) # explicit two-node form
>>> G.add_edge(*e) # single edge as tuple of two nodes
>>> G.add_edges_from( [(1,2)] ) # add edges from iterable container
Associate data to edges using keywords:
>>> G.add_edge(1, 2, weight=3)
>>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7)
"""
# set up attribute dictionary
if attr_dict is None:
attr_dict = attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(
"The attr_dict argument must be a dictionary.")
# add nodes
if u not in self.node:
self.adj[u] = self.adjlist_dict_factory()
self.node[u] = {}
if v not in self.node:
self.adj[v] = self.adjlist_dict_factory()
self.node[v] = {}
# add the edge
datadict = self.adj[u].get(v, self.edge_attr_dict_factory())
datadict.update(attr_dict)
self.adj[u][v] = datadict
self.adj[v][u] = datadict
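# Note the design at the end of add_edge(): self.adj[u][v] and self.adj[v][u]
# reference the *same* attribute dict, so a mutation made through one direction
# is visible through the other. A plain-dict illustration of the structure
# (not NetworkX API, just the shape of the data):
#
#     shared = {'weight': 3}
#     adj = {1: {2: shared}, 2: {1: shared}}
#     adj[1][2]['weight'] = 7
#     assert adj[2][1]['weight'] == 7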
def add_edges_from(self, ebunch, attr_dict=None, **attr):
"""Add all the edges in ebunch.
Parameters
----------
ebunch : container of edges
Each edge given in the container will be added to the
graph. The edges must be given as 2-tuples (u,v) or
3-tuples (u,v,d) where d is a dictionary containing edge
data.
attr_dict : dictionary, optional (default= no attributes)
Dictionary of edge attributes. Key/value pairs will
update existing data associated with each edge.
attr : keyword arguments, optional
Edge data (or labels or objects) can be assigned using
keyword arguments.
See Also
--------
add_edge : add a single edge
add_weighted_edges_from : convenient way to add weighted edges
Notes
-----
Adding the same edge twice has no effect but any edge data
will be updated when each duplicate edge is added.
Edge attributes specified in edges take precedence
over attributes specified generally.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edges_from([(0,1),(1,2)]) # using a list of edge tuples
>>> e = zip(range(0,3),range(1,4))
>>> G.add_edges_from(e) # Add the path graph 0-1-2-3
Associate data to edges
>>> G.add_edges_from([(1,2),(2,3)], weight=3)
>>> G.add_edges_from([(3,4),(1,4)], label='WN2898')
"""
# set up attribute dict
if attr_dict is None:
attr_dict = attr
else:
try:
attr_dict.update(attr)
except AttributeError:
raise NetworkXError(
"The attr_dict argument must be a dictionary.")
# process ebunch
for e in ebunch:
ne = len(e)
if ne == 3:
u, v, dd = e
elif ne == 2:
u, v = e
dd = {} # doesn't need edge_attr_dict_factory
else:
raise NetworkXError(
"Edge tuple %s must be a 2-tuple or 3-tuple." % (e,))
if u not in self.node:
self.adj[u] = self.adjlist_dict_factory()
self.node[u] = {}
if v not in self.node:
self.adj[v] = self.adjlist_dict_factory()
self.node[v] = {}
datadict = self.adj[u].get(v, self.edge_attr_dict_factory())
datadict.update(attr_dict)
datadict.update(dd)
self.adj[u][v] = datadict
self.adj[v][u] = datadict
def add_weighted_edges_from(self, ebunch, weight='weight', **attr):
"""Add all the edges in ebunch as weighted edges with specified
weights.
Parameters
----------
ebunch : container of edges
Each edge given in the list or container will be added
to the graph. The edges must be given as 3-tuples (u,v,w)
where w is a number.
weight : string, optional (default= 'weight')
The attribute name for the edge weights to be added.
attr : keyword arguments, optional (default= no attributes)
Edge attributes to add/update for all edges.
See Also
--------
add_edge : add a single edge
add_edges_from : add multiple edges
Notes
-----
Adding the same edge twice for Graph/DiGraph simply updates
the edge data. For MultiGraph/MultiDiGraph, duplicate edges
are stored.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_weighted_edges_from([(0,1,3.0),(1,2,7.5)])
"""
self.add_edges_from(((u, v, {weight: d}) for u, v, d in ebunch),
**attr)
def remove_edge(self, u, v):
"""Remove the edge between u and v.
Parameters
----------
u,v: nodes
Remove the edge between nodes u and v.
Raises
------
NetworkXError
If there is not an edge between u and v.
See Also
--------
remove_edges_from : remove a collection of edges
Examples
--------
>>> G = nx.Graph() # or DiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.remove_edge(0,1)
>>> e = (1,2)
>>> G.remove_edge(*e) # unpacks e from an edge tuple
>>> e = (2,3,{'weight':7}) # an edge with attribute data
>>> G.remove_edge(*e[:2]) # select first part of edge tuple
"""
try:
del self.adj[u][v]
if u != v: # self-loop needs only one entry removed
del self.adj[v][u]
except KeyError:
raise NetworkXError("The edge %s-%s is not in the graph" % (u, v))
def remove_edges_from(self, ebunch):
"""Remove all edges specified in ebunch.
Parameters
----------
ebunch: list or container of edge tuples
Each edge given in the list or container will be removed
from the graph. The edges can be:
- 2-tuples (u,v) edge between u and v.
- 3-tuples (u,v,k) where k is ignored.
See Also
--------
remove_edge : remove a single edge
Notes
-----
Will fail silently if an edge in ebunch is not in the graph.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> ebunch=[(1,2),(2,3)]
>>> G.remove_edges_from(ebunch)
"""
adj = self.adj
for e in ebunch:
u, v = e[:2] # ignore edge data if present
if u in adj and v in adj[u]:
del adj[u][v]
if u != v: # self loop needs only one entry removed
del adj[v][u]
def has_edge(self, u, v):
"""Return True if the edge (u,v) is in the graph.
Parameters
----------
u,v : nodes
Nodes can be, for example, strings or numbers.
Nodes must be hashable (and not None) Python objects.
Returns
-------
edge_ind : bool
True if edge is in the graph, False otherwise.
Examples
--------
Can be called either using two nodes u,v or edge tuple (u,v)
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.has_edge(0,1) # using two nodes
True
>>> e = (0,1)
>>> G.has_edge(*e) # e is a 2-tuple (u,v)
True
>>> e = (0,1,{'weight':7})
>>> G.has_edge(*e[:2]) # e is a 3-tuple (u,v,data_dictionary)
True
The following are all equivalent:
>>> G.has_edge(0,1)
True
>>> 1 in G[0] # though this gives KeyError if 0 not in G
True
"""
try:
return v in self.adj[u]
except KeyError:
return False
def neighbors(self, n):
"""Return an iterator over all neighbors of node n.
Parameters
----------
n : node
A node in the graph
Returns
-------
neighbors : iterator
An iterator over all neighbors of node n
Raises
------
NetworkXError
If the node n is not in the graph.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> [n for n in G.neighbors(0)]
[1]
Notes
-----
It is usually more convenient (and faster) to access the
adjacency dictionary as ``G[n]``:
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edge('a', 'b', weight=7)
>>> G['a']
{'b': {'weight': 7}}
>>> G = nx.path_graph(4)
>>> [n for n in G[0]]
[1]
"""
try:
return iter(self.adj[n])
except KeyError:
raise NetworkXError("The node %s is not in the graph." % (n,))
def edges(self, nbunch=None, data=False, default=None):
"""Return an iterator over the edges.
Edges are returned as tuples with optional data
in the order (node, neighbor, data).
Parameters
----------
nbunch : iterable container, optional (default= all nodes)
A container of nodes. The container will be iterated
through once.
data : string or bool, optional (default=False)
The edge attribute returned in 3-tuple (u,v,ddict[data]).
If True, return edge attribute dict in 3-tuple (u,v,ddict).
If False, return 2-tuple (u,v).
default : value, optional (default=None)
Value used for edges that don't have the requested attribute.
Only relevant if data is not True or False.
Returns
-------
edges : iterator
An iterator of (u,v) or (u,v,d) tuples of edges.
Notes
-----
Nodes in nbunch that are not in the graph will be (quietly) ignored.
For directed graphs this returns the out-edges.
Examples
--------
>>> G = nx.Graph() # or MultiGraph, etc
>>> G.add_path([0,1,2])
>>> G.add_edge(2,3,weight=5)
>>> [e for e in G.edges()]
[(0, 1), (1, 2), (2, 3)]
>>> list(G.edges(data=True)) # default data is {} (empty dict)
[(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]
>>> list(G.edges(data='weight', default=1))
[(0, 1, 1), (1, 2, 1), (2, 3, 5)]
>>> list(G.edges([0,3]))
[(0, 1), (3, 2)]
>>> list(G.edges(0))
[(0, 1)]
"""
seen = {} # helper dict to keep track of multiply stored edges
if nbunch is None:
nodes_nbrs = self.adj.items()
else:
nodes_nbrs = ((n, self.adj[n]) for n in self.nbunch_iter(nbunch))
if data is True:
for n, nbrs in nodes_nbrs:
for nbr, ddict in nbrs.items():
if nbr not in seen:
yield (n, nbr, ddict)
seen[n] = 1
elif data is not False:
for n, nbrs in nodes_nbrs:
for nbr, ddict in nbrs.items():
if nbr not in seen:
d = ddict[data] if data in ddict else default
yield (n, nbr, d)
seen[n] = 1
else: # data is False
for n, nbrs in nodes_nbrs:
for nbr in nbrs:
if nbr not in seen:
yield (n, nbr)
seen[n] = 1
del seen
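# How the `seen` bookkeeping above avoids double-reporting: every undirected
# edge is stored under both endpoints, so a node is recorded once its whole
# adjacency list has been yielded, and the mirrored entries pointing back at
# it are skipped. Doctest-style illustration:
#
#     >>> G = nx.Graph([(0, 1)])
#     >>> list(G.edges())   # reported once, not also as (1, 0)
#     [(0, 1)]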
def get_edge_data(self, u, v, default=None):
"""Return the attribute dictionary associated with edge (u,v).
Parameters
----------
u,v : nodes
default: any Python object (default=None)
Value to return if the edge (u,v) is not found.
Returns
-------
edge_dict : dictionary
The edge attribute dictionary.
Notes
-----
It is faster to use G[u][v].
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G[0][1]
{}
Warning: Assigning G[u][v] corrupts the graph data structure.
But it is safe to assign attributes to that dictionary,
>>> G[0][1]['weight'] = 7
>>> G[0][1]['weight']
7
>>> G[1][0]['weight']
7
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.get_edge_data(0,1) # default edge data is {}
{}
>>> e = (0,1)
>>> G.get_edge_data(*e) # tuple form
{}
>>> G.get_edge_data('a','b',default=0) # edge not in graph, return 0
0
"""
try:
return self.adj[u][v]
except KeyError:
return default
def adjacency(self):
"""Return an iterator of (node, adjacency dict) tuples for all nodes.
This is the fastest way to look at every edge.
For directed graphs, only outgoing adjacencies are included.
Returns
-------
adj_iter : iterator
An iterator of (node, adjacency dictionary) for all nodes in
the graph.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> [(n,nbrdict) for n,nbrdict in G.adjacency()]
[(0, {1: {}}), (1, {0: {}, 2: {}}), (2, {1: {}, 3: {}}), (3, {2: {}})]
"""
return iter(self.adj.items())
def degree(self, nbunch=None, weight=None):
"""Return an iterator for (node, degree) or degree for single node.
The node degree is the number of edges adjacent to the node.
This function returns the degree for a single node or an iterator
for a bunch of nodes or if nothing is passed as argument.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
The degree is the sum of the edge weights adjacent to the node.
Returns
-------
If a single node is requested
deg:
Degree of the node
OR if multiple nodes are requested
nd_iter : an iterator
The iterator returns two-tuples of (node, degree).
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.degree(0) # node 0 with degree 1
1
>>> list(G.degree([0,1]))
[(0, 1), (1, 2)]
"""
# Test to see if nbunch is a single node, an iterator of nodes or
# None (indicating all nodes). (nbunch in self) is True when nbunch
# is a single node.
if nbunch in self:
nbrs = self.adj[nbunch]
if weight is None:
return len(nbrs) + (1 if nbunch in nbrs else 0) # handle self-loops
return sum(dd.get(weight, 1) for nbr,dd in nbrs.items()) +\
(nbrs[nbunch].get(weight, 1) if nbunch in nbrs else 0)
if nbunch is None:
nodes_nbrs = self.adj.items()
else:
nodes_nbrs = ((n, self.adj[n]) for n in self.nbunch_iter(nbunch))
if weight is None:
def d_iter():
for n, nbrs in nodes_nbrs:
yield (n, len(nbrs) + (1 if n in nbrs else 0)) # return tuple (n,degree)
else:
def d_iter():
for n, nbrs in nodes_nbrs:
yield (n, sum((nbrs[nbr].get(weight, 1) for nbr in nbrs)) +
(nbrs[n].get(weight, 1) if n in nbrs else 0))
return d_iter()
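# The trailing "+ (1 if n in nbrs else 0)" terms in degree() are what make an
# undirected self-loop count twice toward a node's degree, matching the usual
# graph-theoretic convention. Doctest-style illustration:
#
#     >>> G = nx.Graph()
#     >>> G.add_edge(1, 1)
#     >>> G.degree(1)
#     2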
def clear(self):
"""Remove all nodes and edges from the graph.
This also removes the name, and all graph, node, and edge attributes.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.clear()
>>> list(G.nodes())
[]
>>> list(G.edges())
[]
"""
self.name = ''
self.adj.clear()
self.node.clear()
self.graph.clear()
def copy(self):
"""Return a copy of the graph.
Returns
-------
G : Graph
A copy of the graph.
See Also
--------
to_directed: return a directed copy of the graph.
Notes
-----
This makes a complete copy of the graph including all of the
node or edge attributes.
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> H = G.copy()
"""
return deepcopy(self)
def is_multigraph(self):
"""Return True if graph is a multigraph, False otherwise."""
return False
def is_directed(self):
"""Return True if graph is directed, False otherwise."""
return False
def to_directed(self):
"""Return a directed representation of the graph.
Returns
-------
G : DiGraph
A directed graph with the same name, same nodes, and with
each edge (u,v,data) replaced by two directed edges
(u,v,data) and (v,u,data).
Notes
-----
This returns a "deepcopy" of the edge, node, and
graph attributes which attempts to completely copy
all of the data and references.
This is in contrast to the similar D=DiGraph(G) which returns a
shallow copy of the data.
See the Python copy module for more information on shallow
and deep copies, http://docs.python.org/library/copy.html.
Warning: If you have subclassed Graph to use dict-like objects in the
data structure, those changes do not transfer to the DiGraph
created by this method.
Examples
--------
>>> G = nx.Graph() # or MultiGraph, etc
>>> G.add_path([0,1])
>>> H = G.to_directed()
>>> list(H.edges())
[(0, 1), (1, 0)]
If already directed, return a (deep) copy
>>> G = nx.DiGraph() # or MultiDiGraph, etc
>>> G.add_path([0,1])
>>> H = G.to_directed()
>>> list(H.edges())
[(0, 1)]
"""
from networkx import DiGraph
G = DiGraph()
G.name = self.name
G.add_nodes_from(self)
G.add_edges_from(((u, v, deepcopy(data))
for u, nbrs in self.adjacency()
for v, data in nbrs.items()))
G.graph = deepcopy(self.graph)
G.node = deepcopy(self.node)
return G
def to_undirected(self):
"""Return an undirected copy of the graph.
Returns
-------
G : Graph/MultiGraph
A deepcopy of the graph.
See Also
--------
copy, add_edge, add_edges_from
Notes
-----
This returns a "deepcopy" of the edge, node, and
graph attributes which attempts to completely copy
all of the data and references.
This is in contrast to the similar G=DiGraph(D) which returns a
shallow copy of the data.
See the Python copy module for more information on shallow
and deep copies, http://docs.python.org/library/copy.html.
Examples
--------
>>> G = nx.Graph() # or MultiGraph, etc
>>> G.add_path([0,1])
>>> H = G.to_directed()
>>> list(H.edges())
[(0, 1), (1, 0)]
>>> G2 = H.to_undirected()
>>> list(G2.edges())
[(0, 1)]
"""
return deepcopy(self)
def subgraph(self, nbunch):
"""Return the subgraph induced on nodes in nbunch.
The induced subgraph of the graph contains the nodes in nbunch
and the edges between those nodes.
Parameters
----------
nbunch : list, iterable
A container of nodes which will be iterated through once.
Returns
-------
G : Graph
A subgraph of the graph with the same edge attributes.
Notes
-----
The graph, edge or node attributes just point to the original graph.
So changes to the node or edge structure will not be reflected in
the original graph while changes to the attributes will.
To create a subgraph with its own copy of the edge/node attributes use:
nx.Graph(G.subgraph(nbunch))
If edge attributes are containers, a deep copy can be obtained using:
G.subgraph(nbunch).copy()
For an inplace reduction of a graph to a subgraph you can remove nodes:
G.remove_nodes_from([n for n in G if n not in set(nbunch)])
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> H = G.subgraph([0,1,2])
>>> list(H.edges())
[(0, 1), (1, 2)]
"""
bunch = self.nbunch_iter(nbunch)
# create new graph and copy subgraph into it
H = self.__class__()
# copy node and attribute dictionaries
for n in bunch:
H.node[n] = self.node[n]
# namespace shortcuts for speed
H_adj = H.adj
self_adj = self.adj
# add nodes and edges (undirected method)
for n in H.node:
Hnbrs = H.adjlist_dict_factory()
H_adj[n] = Hnbrs
for nbr, d in self_adj[n].items():
if nbr in H_adj:
# add both representations of edge: n-nbr and nbr-n
Hnbrs[nbr] = d
H_adj[nbr][n] = d
H.graph = self.graph
return H
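# As the docstring warns, the subgraph shares attribute dicts with the original
# graph, so attribute edits propagate both ways while structural edits do not.
# Doctest-style illustration:
#
#     >>> G = nx.Graph([(0, 1)])
#     >>> H = G.subgraph([0, 1])
#     >>> H[0][1]['w'] = 5
#     >>> G[0][1]['w']
#     5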
def nodes_with_selfloops(self):
"""Returns an iterator of nodes with self loops.
A node with a self loop has an edge with both ends adjacent
to that node.
Returns
-------
nodelist : list
A list of nodes with self loops.
See Also
--------
selfloop_edges, number_of_selfloops
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edge(1, 1)
>>> G.add_edge(1, 2)
>>> list(G.nodes_with_selfloops())
[1]
"""
return (n for n, nbrs in self.adj.items() if n in nbrs)
def selfloop_edges(self, data=False, default=None):
"""Returns an iterator of selfloop edges.
A selfloop edge has the same node at both ends.
Parameters
----------
data : string or bool, optional (default=False)
Return selfloop edges as two tuples (u,v) (data=False)
or three-tuples (u,v,datadict) (data=True)
or three-tuples (u,v,datavalue) (data='attrname')
default : value, optional (default=None)
Value used for edges that don't have the requested attribute.
Only relevant if data is not True or False.
Returns
-------
edgelist : list of edge tuples
A list of all selfloop edges.
See Also
--------
nodes_with_selfloops, number_of_selfloops
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edge(1,1)
>>> G.add_edge(1,2)
>>> list(G.selfloop_edges())
[(1, 1)]
>>> list(G.selfloop_edges(data=True))
[(1, 1, {})]
"""
if data is True:
return ((n, n, nbrs[n])
for n, nbrs in self.adj.items() if n in nbrs)
elif data is not False:
return ((n, n, nbrs[n].get(data, default))
for n, nbrs in self.adj.items() if n in nbrs)
else:
return ((n, n)
for n, nbrs in self.adj.items() if n in nbrs)
def number_of_selfloops(self):
"""Return the number of selfloop edges.
A selfloop edge has the same node at both ends.
Returns
-------
nloops : int
The number of selfloops.
See Also
--------
nodes_with_selfloops, selfloop_edges
Examples
--------
>>> G=nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edge(1,1)
>>> G.add_edge(1,2)
>>> G.number_of_selfloops()
1
"""
return sum(1 for _ in self.selfloop_edges())
def size(self, weight=None):
"""Return the number of edges.
Parameters
----------
weight : string or None, optional (default=None)
The edge attribute that holds the numerical value used
as a weight. If None, then each edge has weight 1.
Returns
-------
nedges : int
The number of edges or sum of edge weights in the graph.
See Also
--------
number_of_edges
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.size()
3
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_edge('a','b',weight=2)
>>> G.add_edge('b','c',weight=4)
>>> G.size()
2
>>> G.size(weight='weight')
6.0
"""
s = sum(dict(self.degree(weight=weight)).values()) / 2
if weight is None:
return int(s)
else:
return float(s)
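# size() above is the handshake lemma in code: summing the (optionally
# weighted) degrees of all nodes counts every edge exactly twice, so halving
# the sum yields the edge count or total edge weight. Self-loops still count
# as a single edge because degree() adds 2 for them.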
def number_of_edges(self, u=None, v=None):
"""Return the number of edges between two nodes.
Parameters
----------
u,v : nodes, optional (default=all edges)
If u and v are specified, return the number of edges between
u and v. Otherwise return the total number of all edges.
Returns
-------
nedges : int
The number of edges in the graph. If nodes u and v are specified
return the number of edges between those nodes.
See Also
--------
size
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.number_of_edges()
3
>>> G.number_of_edges(0,1)
1
>>> e = (0,1)
>>> G.number_of_edges(*e)
1
"""
if u is None: return int(self.size())
if v in self.adj[u]:
return 1
else:
return 0
def add_star(self, nodes, **attr):
"""Add a star.
The first node in nodes is the middle of the star. It is connected
to all other nodes.
Parameters
----------
nodes : iterable container
A container of nodes.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to every edge in star.
See Also
--------
add_path, add_cycle
Examples
--------
>>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_star([0,1,2,3])
>>> G.add_star([10,11,12],weight=2)
"""
nlist = list(nodes)
v = nlist[0]
edges = ((v, n) for n in nlist[1:])
self.add_edges_from(edges, **attr)
def add_path(self, nodes, **attr):
"""Add a path.
Parameters
----------
nodes : iterable container
A container of nodes. A path will be constructed from
the nodes (in order) and added to the graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to every edge in path.
See Also
--------
add_star, add_cycle
Examples
--------
>>> G=nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_path([0,1,2,3])
>>> G.add_path([10,11,12],weight=7)
"""
nlist = list(nodes)
edges = zip(nlist[:-1], nlist[1:])
self.add_edges_from(edges, **attr)
def add_cycle(self, nodes, **attr):
"""Add a cycle.
Parameters
----------
nodes: iterable container
A container of nodes. A cycle will be constructed from
the nodes (in order) and added to the graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to every edge in cycle.
See Also
--------
add_path, add_star
Examples
--------
>>> G=nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
>>> G.add_cycle([0,1,2,3])
>>> G.add_cycle([10,11,12],weight=7)
"""
nlist = list(nodes)
edges = zip(nlist, nlist[1:] + [nlist[0]])
self.add_edges_from(edges, **attr)
def nbunch_iter(self, nbunch=None):
"""Return an iterator of nodes contained in nbunch that are
also in the graph.
The nodes in nbunch are checked for membership in the graph
and if not are silently ignored.
Parameters
----------
nbunch : iterable container, optional (default=all nodes)
A container of nodes. The container will be iterated
through once.
Returns
-------
niter : iterator
An iterator over nodes in nbunch that are also in the graph.
If nbunch is None, iterate over all nodes in the graph.
Raises
------
NetworkXError
If nbunch is not a node or a sequence of nodes.
If a node in nbunch is not hashable.
See Also
--------
Graph.__iter__
Notes
-----
When nbunch is an iterator, the returned iterator yields values
directly from nbunch, becoming exhausted when nbunch is exhausted.
To test whether nbunch is a single node, one can use
"if nbunch in self:", even after processing with this routine.
If nbunch is not a node or a (possibly empty) sequence/iterator
or None, a NetworkXError is raised. Also, if any object in
nbunch is not hashable, a NetworkXError is raised.
"""
if nbunch is None: # include all nodes via iterator
bunch = iter(self.adj.keys())
elif nbunch in self: # if nbunch is a single node
bunch = iter([nbunch])
else: # if nbunch is a sequence of nodes
def bunch_iter(nlist, adj):
try:
for n in nlist:
if n in adj:
yield n
except TypeError as e:
message = e.args[0]
# capture error for non-sequence/iterator nbunch.
if 'iter' in message:
raise NetworkXError(
"nbunch is not a node or a sequence of nodes.")
# capture error for unhashable node.
elif 'hashable' in message:
raise NetworkXError(
"Node %s in the sequence nbunch is not a valid node."%n)
else:
raise
bunch = bunch_iter(nbunch, self.adj)
return bunch<|fim▁end|> | |
<|file_name|>meta_graph.pb.cc<|end_file_name|><|fim▁begin|>// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: tensorflow/core/protobuf/meta_graph.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "tensorflow/core/protobuf/meta_graph.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/port.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/wire_format.h>
// @@protoc_insertion_point(includes)
namespace tensorflow {
class MetaGraphDef_MetaInfoDefDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<MetaGraphDef_MetaInfoDef> {
} _MetaGraphDef_MetaInfoDef_default_instance_;
class MetaGraphDefDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<MetaGraphDef> {
} _MetaGraphDef_default_instance_;
class CollectionDef_NodeListDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<CollectionDef_NodeList> {
} _CollectionDef_NodeList_default_instance_;
class CollectionDef_BytesListDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<CollectionDef_BytesList> {
} _CollectionDef_BytesList_default_instance_;
class CollectionDef_Int64ListDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<CollectionDef_Int64List> {
} _CollectionDef_Int64List_default_instance_;
class CollectionDef_FloatListDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<CollectionDef_FloatList> {
} _CollectionDef_FloatList_default_instance_;
class CollectionDef_AnyListDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<CollectionDef_AnyList> {
} _CollectionDef_AnyList_default_instance_;
class CollectionDefDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<CollectionDef> {
public:
const ::tensorflow::CollectionDef_NodeList* node_list_;
const ::tensorflow::CollectionDef_BytesList* bytes_list_;
const ::tensorflow::CollectionDef_Int64List* int64_list_;
const ::tensorflow::CollectionDef_FloatList* float_list_;
const ::tensorflow::CollectionDef_AnyList* any_list_;
} _CollectionDef_default_instance_;
class TensorInfoDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<TensorInfo> {
} _TensorInfo_default_instance_;
class SignatureDefDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<SignatureDef> {
} _SignatureDef_default_instance_;
class AssetFileDefDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<AssetFileDef> {
} _AssetFileDef_default_instance_;
namespace protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto {
namespace {
::google::protobuf::Metadata file_level_metadata[15];
} // namespace
const ::google::protobuf::uint32 TableStruct::offsets[] = {
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, meta_graph_version_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, stripped_op_list_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, any_info_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, tags_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, tensorflow_version_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef_MetaInfoDef, tensorflow_git_version_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, meta_info_def_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, graph_def_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, saver_def_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, collection_def_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, signature_def_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(MetaGraphDef, asset_file_def_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_NodeList, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_NodeList, value_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_BytesList, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_BytesList, value_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_Int64List, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_Int64List, value_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_FloatList, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_FloatList, value_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_AnyList, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef_AnyList, value_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef, _internal_metadata_),
~0u, // no _extensions_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef, _oneof_case_[0]),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_CollectionDef_default_instance_), node_list_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_CollectionDef_default_instance_), bytes_list_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_CollectionDef_default_instance_), int64_list_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_CollectionDef_default_instance_), float_list_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET((&_CollectionDef_default_instance_), any_list_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CollectionDef, kind_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(TensorInfo, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(TensorInfo, name_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(TensorInfo, dtype_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(TensorInfo, tensor_shape_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SignatureDef, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SignatureDef, inputs_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SignatureDef, outputs_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(SignatureDef, method_name_),
~0u, // no _has_bits_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AssetFileDef, _internal_metadata_),
~0u, // no _extensions_
~0u, // no _oneof_case_
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AssetFileDef, tensor_info_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AssetFileDef, filename_),
};
static const ::google::protobuf::internal::MigrationSchema schemas[] = {
{ 0, -1, sizeof(MetaGraphDef_MetaInfoDef)},
{ 10, -1, sizeof(MetaGraphDef)},
{ 20, -1, sizeof(CollectionDef_NodeList)},
{ 25, -1, sizeof(CollectionDef_BytesList)},
{ 30, -1, sizeof(CollectionDef_Int64List)},
{ 35, -1, sizeof(CollectionDef_FloatList)},
{ 40, -1, sizeof(CollectionDef_AnyList)},
{ 45, -1, sizeof(CollectionDef)},
{ 55, -1, sizeof(TensorInfo)},
{ 62, -1, sizeof(SignatureDef)},
{ 69, -1, sizeof(AssetFileDef)},
};
static ::google::protobuf::Message const * const file_default_instances[] = {
reinterpret_cast<const ::google::protobuf::Message*>(&_MetaGraphDef_MetaInfoDef_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_MetaGraphDef_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_CollectionDef_NodeList_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_CollectionDef_BytesList_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_CollectionDef_Int64List_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_CollectionDef_FloatList_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_CollectionDef_AnyList_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_CollectionDef_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_TensorInfo_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_SignatureDef_default_instance_),
reinterpret_cast<const ::google::protobuf::Message*>(&_AssetFileDef_default_instance_),
};
namespace {
void protobuf_AssignDescriptors() {
AddDescriptors();
::google::protobuf::MessageFactory* factory = NULL;
AssignDescriptors(
"tensorflow/core/protobuf/meta_graph.proto", schemas, file_default_instances, TableStruct::offsets, factory,
file_level_metadata, NULL, NULL);
}
void protobuf_AssignDescriptorsOnce() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &protobuf_AssignDescriptors);
}
void protobuf_RegisterTypes(const ::std::string&) GOOGLE_ATTRIBUTE_COLD;
void protobuf_RegisterTypes(const ::std::string&) {
protobuf_AssignDescriptorsOnce();
::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 15);
const ::google::protobuf::Descriptor* MetaGraphDef_CollectionDefEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[1].descriptor;
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
MetaGraphDef_CollectionDefEntry_descriptor,
::google::protobuf::internal::MapEntry<
::std::string,
::tensorflow::CollectionDef,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0>::CreateDefaultInstance(
MetaGraphDef_CollectionDefEntry_descriptor));
const ::google::protobuf::Descriptor* MetaGraphDef_SignatureDefEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[2].descriptor;
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
MetaGraphDef_SignatureDefEntry_descriptor,
::google::protobuf::internal::MapEntry<
::std::string,
::tensorflow::SignatureDef,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0>::CreateDefaultInstance(
MetaGraphDef_SignatureDefEntry_descriptor));
const ::google::protobuf::Descriptor* SignatureDef_InputsEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[11].descriptor;
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
SignatureDef_InputsEntry_descriptor,
::google::protobuf::internal::MapEntry<
::std::string,
::tensorflow::TensorInfo,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0>::CreateDefaultInstance(
SignatureDef_InputsEntry_descriptor));
const ::google::protobuf::Descriptor* SignatureDef_OutputsEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[12].descriptor;
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
SignatureDef_OutputsEntry_descriptor,
::google::protobuf::internal::MapEntry<
::std::string,
::tensorflow::TensorInfo,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0>::CreateDefaultInstance(
SignatureDef_OutputsEntry_descriptor));
}
} // namespace
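// TableStruct::Shutdown releases the heap state owned by this file: each
// default instance plus the lazily built reflection objects. It runs via
// the OnShutdown hook registered at the end of AddDescriptorsImpl().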
void TableStruct::Shutdown() {
_MetaGraphDef_MetaInfoDef_default_instance_.Shutdown();
delete file_level_metadata[0].reflection;
_MetaGraphDef_default_instance_.Shutdown();
delete file_level_metadata[3].reflection;
_CollectionDef_NodeList_default_instance_.Shutdown();
delete file_level_metadata[4].reflection;
_CollectionDef_BytesList_default_instance_.Shutdown();
delete file_level_metadata[5].reflection;
_CollectionDef_Int64List_default_instance_.Shutdown();
delete file_level_metadata[6].reflection;
_CollectionDef_FloatList_default_instance_.Shutdown();
delete file_level_metadata[7].reflection;
_CollectionDef_AnyList_default_instance_.Shutdown();
delete file_level_metadata[8].reflection;
_CollectionDef_default_instance_.Shutdown();
delete file_level_metadata[9].reflection;
_TensorInfo_default_instance_.Shutdown();
delete file_level_metadata[10].reflection;
_SignatureDef_default_instance_.Shutdown();
delete file_level_metadata[13].reflection;
_AssetFileDef_default_instance_.Shutdown();
delete file_level_metadata[14].reflection;
}
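// InitDefaultsImpl constructs every default instance for this file. The
// dependency files (any.proto, graph.proto, op_def.proto, tensor_shape.proto,
// types.proto, saver.proto) are initialized first so that the cross-message
// pointer fields below (stripped_op_list_, any_info_, graph_def_, ...) can
// be pointed at their dependencies' default instances.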
void TableStruct::InitDefaultsImpl() {
GOOGLE_PROTOBUF_VERIFY_VERSION;
::google::protobuf::internal::InitProtobufDefaults();
::google::protobuf::protobuf_google_2fprotobuf_2fany_2eproto::InitDefaults();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2fgraph_2eproto::InitDefaults();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2fop_5fdef_2eproto::InitDefaults();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2ftensor_5fshape_2eproto::InitDefaults();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2ftypes_2eproto::InitDefaults();
::tensorflow::protobuf_tensorflow_2fcore_2fprotobuf_2fsaver_2eproto::InitDefaults();
_MetaGraphDef_MetaInfoDef_default_instance_.DefaultConstruct();
_MetaGraphDef_default_instance_.DefaultConstruct();
_CollectionDef_NodeList_default_instance_.DefaultConstruct();
_CollectionDef_BytesList_default_instance_.DefaultConstruct();
_CollectionDef_Int64List_default_instance_.DefaultConstruct();
_CollectionDef_FloatList_default_instance_.DefaultConstruct();
_CollectionDef_AnyList_default_instance_.DefaultConstruct();
_CollectionDef_default_instance_.DefaultConstruct();
_TensorInfo_default_instance_.DefaultConstruct();
_SignatureDef_default_instance_.DefaultConstruct();
_AssetFileDef_default_instance_.DefaultConstruct();
_MetaGraphDef_MetaInfoDef_default_instance_.get_mutable()->stripped_op_list_ = const_cast< ::tensorflow::OpList*>(
::tensorflow::OpList::internal_default_instance());
_MetaGraphDef_MetaInfoDef_default_instance_.get_mutable()->any_info_ = const_cast< ::google::protobuf::Any*>(
::google::protobuf::Any::internal_default_instance());
_MetaGraphDef_default_instance_.get_mutable()->meta_info_def_ = const_cast< ::tensorflow::MetaGraphDef_MetaInfoDef*>(
::tensorflow::MetaGraphDef_MetaInfoDef::internal_default_instance());
_MetaGraphDef_default_instance_.get_mutable()->graph_def_ = const_cast< ::tensorflow::GraphDef*>(
::tensorflow::GraphDef::internal_default_instance());
_MetaGraphDef_default_instance_.get_mutable()->saver_def_ = const_cast< ::tensorflow::SaverDef*>(
::tensorflow::SaverDef::internal_default_instance());
_CollectionDef_default_instance_.node_list_ = const_cast< ::tensorflow::CollectionDef_NodeList*>(
::tensorflow::CollectionDef_NodeList::internal_default_instance());
_CollectionDef_default_instance_.bytes_list_ = const_cast< ::tensorflow::CollectionDef_BytesList*>(
::tensorflow::CollectionDef_BytesList::internal_default_instance());
_CollectionDef_default_instance_.int64_list_ = const_cast< ::tensorflow::CollectionDef_Int64List*>(
::tensorflow::CollectionDef_Int64List::internal_default_instance());
_CollectionDef_default_instance_.float_list_ = const_cast< ::tensorflow::CollectionDef_FloatList*>(
::tensorflow::CollectionDef_FloatList::internal_default_instance());
_CollectionDef_default_instance_.any_list_ = const_cast< ::tensorflow::CollectionDef_AnyList*>(
::tensorflow::CollectionDef_AnyList::internal_default_instance());
_TensorInfo_default_instance_.get_mutable()->tensor_shape_ = const_cast< ::tensorflow::TensorShapeProto*>(
::tensorflow::TensorShapeProto::internal_default_instance());
_AssetFileDef_default_instance_.get_mutable()->tensor_info_ = const_cast< ::tensorflow::TensorInfo*>(
::tensorflow::TensorInfo::internal_default_instance());
}
void InitDefaults() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &TableStruct::InitDefaultsImpl);
}
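// AddDescriptorsImpl registers this file's schema with the descriptor pool.
// The literal below is the serialized FileDescriptorProto for
// tensorflow/core/protobuf/meta_graph.proto (2021 bytes, matching the length
// passed to InternalAddGeneratedFile), written as adjacent string literals
// that the compiler concatenates.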
void AddDescriptorsImpl() {
InitDefaults();
static const char descriptor[] = {
"\n)tensorflow/core/protobuf/meta_graph.pr"
"oto\022\ntensorflow\032\031google/protobuf/any.pro"
"to\032%tensorflow/core/framework/graph.prot"
"o\032&tensorflow/core/framework/op_def.prot"
"o\032,tensorflow/core/framework/tensor_shap"
"e.proto\032%tensorflow/core/framework/types"
".proto\032$tensorflow/core/protobuf/saver.p"
"roto\"\303\005\n\014MetaGraphDef\022;\n\rmeta_info_def\030\001"
" \001(\0132$.tensorflow.MetaGraphDef.MetaInfoD"
"ef\022\'\n\tgraph_def\030\002 \001(\0132\024.tensorflow.Graph"
"Def\022\'\n\tsaver_def\030\003 \001(\0132\024.tensorflow.Save"
"rDef\022C\n\016collection_def\030\004 \003(\0132+.tensorflo"
"w.MetaGraphDef.CollectionDefEntry\022A\n\rsig"
"nature_def\030\005 \003(\0132*.tensorflow.MetaGraphD"
"ef.SignatureDefEntry\0220\n\016asset_file_def\030\006"
" \003(\0132\030.tensorflow.AssetFileDef\032\311\001\n\013MetaI"
"nfoDef\022\032\n\022meta_graph_version\030\001 \001(\t\022,\n\020st"
"ripped_op_list\030\002 \001(\0132\022.tensorflow.OpList"
"\022&\n\010any_info\030\003 \001(\0132\024.google.protobuf.Any"
"\022\014\n\004tags\030\004 \003(\t\022\032\n\022tensorflow_version\030\005 \001"
"(\t\022\036\n\026tensorflow_git_version\030\006 \001(\t\032O\n\022Co"
"llectionDefEntry\022\013\n\003key\030\001 \001(\t\022(\n\005value\030\002"
" \001(\0132\031.tensorflow.CollectionDef:\0028\001\032M\n\021S"
"ignatureDefEntry\022\013\n\003key\030\001 \001(\t\022\'\n\005value\030\002"
" \001(\0132\030.tensorflow.SignatureDef:\0028\001\"\337\003\n\rC"
"ollectionDef\0227\n\tnode_list\030\001 \001(\0132\".tensor"
"flow.CollectionDef.NodeListH\000\0229\n\nbytes_l"
"ist\030\002 \001(\0132#.tensorflow.CollectionDef.Byt"
"esListH\000\0229\n\nint64_list\030\003 \001(\0132#.tensorflo"
"w.CollectionDef.Int64ListH\000\0229\n\nfloat_lis"
"t\030\004 \001(\0132#.tensorflow.CollectionDef.Float"
"ListH\000\0225\n\010any_list\030\005 \001(\0132!.tensorflow.Co"
"llectionDef.AnyListH\000\032\031\n\010NodeList\022\r\n\005val"
"ue\030\001 \003(\t\032\032\n\tBytesList\022\r\n\005value\030\001 \003(\014\032\036\n\t"
"Int64List\022\021\n\005value\030\001 \003(\003B\002\020\001\032\036\n\tFloatLis"
"t\022\021\n\005value\030\001 \003(\002B\002\020\001\032.\n\007AnyList\022#\n\005value"
"\030\001 \003(\0132\024.google.protobuf.AnyB\006\n\004kind\"s\n\n"
"TensorInfo\022\014\n\004name\030\001 \001(\t\022#\n\005dtype\030\002 \001(\0162"
"\024.tensorflow.DataType\0222\n\014tensor_shape\030\003 "
"\001(\0132\034.tensorflow.TensorShapeProto\"\240\002\n\014Si"
"gnatureDef\0224\n\006inputs\030\001 \003(\0132$.tensorflow."
"SignatureDef.InputsEntry\0226\n\007outputs\030\002 \003("
"\0132%.tensorflow.SignatureDef.OutputsEntry"
"\022\023\n\013method_name\030\003 \001(\t\032E\n\013InputsEntry\022\013\n\003"
"key\030\001 \001(\t\022%\n\005value\030\002 \001(\0132\026.tensorflow.Te"
"nsorInfo:\0028\001\032F\n\014OutputsEntry\022\013\n\003key\030\001 \001("
"\t\022%\n\005value\030\002 \001(\0132\026.tensorflow.TensorInfo"
":\0028\001\"M\n\014AssetFileDef\022+\n\013tensor_info\030\001 \001("
"\0132\026.tensorflow.TensorInfo\022\020\n\010filename\030\002 "
"\001(\tB0\n\030org.tensorflow.frameworkB\017MetaGra"
"phProtosP\001\370\001\001b\006proto3"
};
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
descriptor, 2021);
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
"tensorflow/core/protobuf/meta_graph.proto", &protobuf_RegisterTypes);
::google::protobuf::protobuf_google_2fprotobuf_2fany_2eproto::AddDescriptors();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2fgraph_2eproto::AddDescriptors();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2fop_5fdef_2eproto::AddDescriptors();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2ftensor_5fshape_2eproto::AddDescriptors();
::tensorflow::protobuf_tensorflow_2fcore_2fframework_2ftypes_2eproto::AddDescriptors();
::tensorflow::protobuf_tensorflow_2fcore_2fprotobuf_2fsaver_2eproto::AddDescriptors();
::google::protobuf::internal::OnShutdown(&TableStruct::Shutdown);
}
void AddDescriptors() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl);
}
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer {
StaticDescriptorInitializer() {
AddDescriptors();
}
} static_descriptor_initializer;
} // namespace protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto
// ===================================================================
void MetaGraphDef_MetaInfoDef::_slow_mutable_stripped_op_list() {
stripped_op_list_ = ::google::protobuf::Arena::CreateMessage< ::tensorflow::OpList >(
GetArenaNoVirtual());
}
::tensorflow::OpList* MetaGraphDef_MetaInfoDef::_slow_release_stripped_op_list() {
if (stripped_op_list_ == NULL) {
return NULL;
} else {
::tensorflow::OpList* temp = new ::tensorflow::OpList(*stripped_op_list_);
stripped_op_list_ = NULL;
return temp;
}
}
::tensorflow::OpList* MetaGraphDef_MetaInfoDef::unsafe_arena_release_stripped_op_list() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.MetaInfoDef.stripped_op_list)
::tensorflow::OpList* temp = stripped_op_list_;
stripped_op_list_ = NULL;
return temp;
}
void MetaGraphDef_MetaInfoDef::_slow_set_allocated_stripped_op_list(
::google::protobuf::Arena* message_arena, ::tensorflow::OpList** stripped_op_list) {
if (message_arena != NULL &&
::google::protobuf::Arena::GetArena(*stripped_op_list) == NULL) {
message_arena->Own(*stripped_op_list);
} else if (message_arena !=
::google::protobuf::Arena::GetArena(*stripped_op_list)) {
::tensorflow::OpList* new_stripped_op_list =
::google::protobuf::Arena::CreateMessage< ::tensorflow::OpList >(
message_arena);
new_stripped_op_list->CopyFrom(**stripped_op_list);
*stripped_op_list = new_stripped_op_list;
}
}
void MetaGraphDef_MetaInfoDef::unsafe_arena_set_allocated_stripped_op_list(
::tensorflow::OpList* stripped_op_list) {
if (GetArenaNoVirtual() == NULL) {
delete stripped_op_list_;
}
stripped_op_list_ = stripped_op_list;
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.stripped_op_list)
}
void MetaGraphDef_MetaInfoDef::_slow_mutable_any_info() {
any_info_ = ::google::protobuf::Arena::Create< ::google::protobuf::Any >(
GetArenaNoVirtual());
}
::google::protobuf::Any* MetaGraphDef_MetaInfoDef::_slow_release_any_info() {
if (any_info_ == NULL) {
return NULL;
} else {
::google::protobuf::Any* temp = new ::google::protobuf::Any(*any_info_);
any_info_ = NULL;
return temp;
}
}
::google::protobuf::Any* MetaGraphDef_MetaInfoDef::unsafe_arena_release_any_info() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.MetaInfoDef.any_info)
::google::protobuf::Any* temp = any_info_;
any_info_ = NULL;
return temp;
}
void MetaGraphDef_MetaInfoDef::unsafe_arena_set_allocated_any_info(
::google::protobuf::Any* any_info) {
if (GetArenaNoVirtual() == NULL) {
delete any_info_;
}
any_info_ = any_info;
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.any_info)
}
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int MetaGraphDef_MetaInfoDef::kMetaGraphVersionFieldNumber;
const int MetaGraphDef_MetaInfoDef::kStrippedOpListFieldNumber;
const int MetaGraphDef_MetaInfoDef::kAnyInfoFieldNumber;
const int MetaGraphDef_MetaInfoDef::kTagsFieldNumber;
const int MetaGraphDef_MetaInfoDef::kTensorflowVersionFieldNumber;
const int MetaGraphDef_MetaInfoDef::kTensorflowGitVersionFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
MetaGraphDef_MetaInfoDef::MetaGraphDef_MetaInfoDef()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.MetaGraphDef.MetaInfoDef)
}
MetaGraphDef_MetaInfoDef::MetaGraphDef_MetaInfoDef(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
tags_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.MetaGraphDef.MetaInfoDef)
}
MetaGraphDef_MetaInfoDef::MetaGraphDef_MetaInfoDef(const MetaGraphDef_MetaInfoDef& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
tags_(from.tags_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
meta_graph_version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.meta_graph_version().size() > 0) {
meta_graph_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.meta_graph_version(),
GetArenaNoVirtual());
}
tensorflow_version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.tensorflow_version().size() > 0) {
tensorflow_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.tensorflow_version(),
GetArenaNoVirtual());
}
tensorflow_git_version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.tensorflow_git_version().size() > 0) {
tensorflow_git_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.tensorflow_git_version(),
GetArenaNoVirtual());
}
if (from.has_stripped_op_list()) {
stripped_op_list_ = new ::tensorflow::OpList(*from.stripped_op_list_);
} else {
stripped_op_list_ = NULL;
}
if (from.has_any_info()) {
any_info_ = new ::google::protobuf::Any(*from.any_info_);
} else {
any_info_ = NULL;
}
// @@protoc_insertion_point(copy_constructor:tensorflow.MetaGraphDef.MetaInfoDef)
}
void MetaGraphDef_MetaInfoDef::SharedCtor() {
meta_graph_version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
tensorflow_version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
tensorflow_git_version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
::memset(&stripped_op_list_, 0, reinterpret_cast<char*>(&any_info_) -
reinterpret_cast<char*>(&stripped_op_list_) + sizeof(any_info_));
_cached_size_ = 0;
}
MetaGraphDef_MetaInfoDef::~MetaGraphDef_MetaInfoDef() {
// @@protoc_insertion_point(destructor:tensorflow.MetaGraphDef.MetaInfoDef)
SharedDtor();
}
void MetaGraphDef_MetaInfoDef::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
meta_graph_version_.Destroy(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), arena);
tensorflow_version_.Destroy(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), arena);
tensorflow_git_version_.Destroy(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), arena);
if (this != internal_default_instance()) {
delete stripped_op_list_;
delete any_info_;
}
}
void MetaGraphDef_MetaInfoDef::ArenaDtor(void* object) {
MetaGraphDef_MetaInfoDef* _this = reinterpret_cast< MetaGraphDef_MetaInfoDef* >(object);
(void)_this;
}
void MetaGraphDef_MetaInfoDef::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void MetaGraphDef_MetaInfoDef::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* MetaGraphDef_MetaInfoDef::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[0].descriptor;
}
const MetaGraphDef_MetaInfoDef& MetaGraphDef_MetaInfoDef::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
MetaGraphDef_MetaInfoDef* MetaGraphDef_MetaInfoDef::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<MetaGraphDef_MetaInfoDef>(arena);
}
void MetaGraphDef_MetaInfoDef::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.MetaGraphDef.MetaInfoDef)
tags_.Clear();
meta_graph_version_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
tensorflow_version_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
tensorflow_git_version_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
if (GetArenaNoVirtual() == NULL && stripped_op_list_ != NULL) {
delete stripped_op_list_;
}
stripped_op_list_ = NULL;
if (GetArenaNoVirtual() == NULL && any_info_ != NULL) {
delete any_info_;
}
any_info_ = NULL;
}
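// Wire-format note for the parser below: each case compares the low byte of
// the field's tag, which encodes (field_number << 3) | wire_type. All fields
// here are length-delimited (wire type 2), so field 1 matches 10u
// ((1 << 3) | 2), field 2 matches 18u, and so on up to field 6 at 50u.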
bool MetaGraphDef_MetaInfoDef::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.MetaGraphDef.MetaInfoDef)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// string meta_graph_version = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_meta_graph_version()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->meta_graph_version().data(), this->meta_graph_version().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version"));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.OpList stripped_op_list = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_stripped_op_list()));
} else {
goto handle_unusual;
}
break;
}
// .google.protobuf.Any any_info = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_any_info()));
} else {
goto handle_unusual;
}
break;
}
// repeated string tags = 4;
case 4: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(34u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->add_tags()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tags(this->tags_size() - 1).data(),
this->tags(this->tags_size() - 1).length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.MetaGraphDef.MetaInfoDef.tags"));
} else {
goto handle_unusual;
}
break;
}
// string tensorflow_version = 5;
case 5: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(42u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_tensorflow_version()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tensorflow_version().data(), this->tensorflow_version().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version"));
} else {
goto handle_unusual;
}
break;
}
// string tensorflow_git_version = 6;
case 6: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(50u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_tensorflow_git_version()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tensorflow_git_version().data(), this->tensorflow_git_version().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version"));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.MetaGraphDef.MetaInfoDef)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.MetaGraphDef.MetaInfoDef)
return false;
#undef DO_
}
void MetaGraphDef_MetaInfoDef::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.MetaGraphDef.MetaInfoDef)
// string meta_graph_version = 1;
if (this->meta_graph_version().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->meta_graph_version().data(), this->meta_graph_version().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
1, this->meta_graph_version(), output);
}
// .tensorflow.OpList stripped_op_list = 2;
if (this->has_stripped_op_list()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, *this->stripped_op_list_, output);
}
// .google.protobuf.Any any_info = 3;
if (this->has_any_info()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
3, *this->any_info_, output);
}
// repeated string tags = 4;
for (int i = 0, n = this->tags_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tags(i).data(), this->tags(i).length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.tags");
::google::protobuf::internal::WireFormatLite::WriteString(
4, this->tags(i), output);
}
// string tensorflow_version = 5;
if (this->tensorflow_version().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tensorflow_version().data(), this->tensorflow_version().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
5, this->tensorflow_version(), output);
}
// string tensorflow_git_version = 6;
if (this->tensorflow_git_version().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tensorflow_git_version().data(), this->tensorflow_git_version().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
6, this->tensorflow_git_version(), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.MetaGraphDef.MetaInfoDef)
}
::google::protobuf::uint8* MetaGraphDef_MetaInfoDef::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.MetaGraphDef.MetaInfoDef)
// string meta_graph_version = 1;
if (this->meta_graph_version().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->meta_graph_version().data(), this->meta_graph_version().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version");
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
1, this->meta_graph_version(), target);
}
// .tensorflow.OpList stripped_op_list = 2;
if (this->has_stripped_op_list()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
2, *this->stripped_op_list_, false, target);
}
// .google.protobuf.Any any_info = 3;
if (this->has_any_info()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
3, *this->any_info_, false, target);
}
// repeated string tags = 4;
for (int i = 0, n = this->tags_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tags(i).data(), this->tags(i).length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.tags");
target = ::google::protobuf::internal::WireFormatLite::
WriteStringToArray(4, this->tags(i), target);
}
// string tensorflow_version = 5;
if (this->tensorflow_version().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tensorflow_version().data(), this->tensorflow_version().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version");
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
5, this->tensorflow_version(), target);
}
// string tensorflow_git_version = 6;
if (this->tensorflow_git_version().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->tensorflow_git_version().data(), this->tensorflow_git_version().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version");
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
6, this->tensorflow_git_version(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.MetaGraphDef.MetaInfoDef)
return target;
}
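// ByteSizeLong below computes the full serialized size and caches it in
// _cached_size_ under the concurrent-writes guard, so the serializers above
// can emit length prefixes without re-measuring every submessage.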
size_t MetaGraphDef_MetaInfoDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.MetaGraphDef.MetaInfoDef)
size_t total_size = 0;
// repeated string tags = 4;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->tags_size());
for (int i = 0, n = this->tags_size(); i < n; i++) {
total_size += ::google::protobuf::internal::WireFormatLite::StringSize(
this->tags(i));
}
// string meta_graph_version = 1;
if (this->meta_graph_version().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->meta_graph_version());
}
// string tensorflow_version = 5;
if (this->tensorflow_version().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->tensorflow_version());
}
// string tensorflow_git_version = 6;
if (this->tensorflow_git_version().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->tensorflow_git_version());
}
// .tensorflow.OpList stripped_op_list = 2;
if (this->has_stripped_op_list()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->stripped_op_list_);
}
// .google.protobuf.Any any_info = 3;
if (this->has_any_info()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->any_info_);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void MetaGraphDef_MetaInfoDef::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.MetaGraphDef.MetaInfoDef)
GOOGLE_DCHECK_NE(&from, this);
const MetaGraphDef_MetaInfoDef* source =
::google::protobuf::internal::DynamicCastToGenerated<const MetaGraphDef_MetaInfoDef>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.MetaGraphDef.MetaInfoDef)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.MetaGraphDef.MetaInfoDef)
MergeFrom(*source);
}
}
void MetaGraphDef_MetaInfoDef::MergeFrom(const MetaGraphDef_MetaInfoDef& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.MetaGraphDef.MetaInfoDef)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
tags_.MergeFrom(from.tags_);
if (from.meta_graph_version().size() > 0) {
set_meta_graph_version(from.meta_graph_version());
}
if (from.tensorflow_version().size() > 0) {
set_tensorflow_version(from.tensorflow_version());
}
if (from.tensorflow_git_version().size() > 0) {
set_tensorflow_git_version(from.tensorflow_git_version());
}
if (from.has_stripped_op_list()) {
mutable_stripped_op_list()->::tensorflow::OpList::MergeFrom(from.stripped_op_list());
}
if (from.has_any_info()) {
mutable_any_info()->::google::protobuf::Any::MergeFrom(from.any_info());
}
}
void MetaGraphDef_MetaInfoDef::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.MetaGraphDef.MetaInfoDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void MetaGraphDef_MetaInfoDef::CopyFrom(const MetaGraphDef_MetaInfoDef& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.MetaGraphDef.MetaInfoDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool MetaGraphDef_MetaInfoDef::IsInitialized() const {
return true;
}
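// Swap is arena-aware: when both messages share an arena (or both live on
// the heap) it swaps raw pointers via InternalSwap; otherwise it copies
// through a temporary, since owned pointers must not cross arena boundaries.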
void MetaGraphDef_MetaInfoDef::Swap(MetaGraphDef_MetaInfoDef* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
MetaGraphDef_MetaInfoDef* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void MetaGraphDef_MetaInfoDef::UnsafeArenaSwap(MetaGraphDef_MetaInfoDef* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void MetaGraphDef_MetaInfoDef::InternalSwap(MetaGraphDef_MetaInfoDef* other) {
tags_.UnsafeArenaSwap(&other->tags_);
meta_graph_version_.Swap(&other->meta_graph_version_);
tensorflow_version_.Swap(&other->tensorflow_version_);
tensorflow_git_version_.Swap(&other->tensorflow_git_version_);
std::swap(stripped_op_list_, other->stripped_op_list_);
std::swap(any_info_, other->any_info_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata MetaGraphDef_MetaInfoDef::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[0];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// MetaGraphDef_MetaInfoDef
// string meta_graph_version = 1;
void MetaGraphDef_MetaInfoDef::clear_meta_graph_version() {
meta_graph_version_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
const ::std::string& MetaGraphDef_MetaInfoDef::meta_graph_version() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
return meta_graph_version_.Get();
}
void MetaGraphDef_MetaInfoDef::set_meta_graph_version(const ::std::string& value) {
meta_graph_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
// @@protoc_insertion_point(field_set:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
}
void MetaGraphDef_MetaInfoDef::set_meta_graph_version(const char* value) {
meta_graph_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_char:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
}
void MetaGraphDef_MetaInfoDef::set_meta_graph_version(const char* value,
size_t size) {
meta_graph_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_pointer:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
}
::std::string* MetaGraphDef_MetaInfoDef::mutable_meta_graph_version() {
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
return meta_graph_version_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* MetaGraphDef_MetaInfoDef::release_meta_graph_version() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
return meta_graph_version_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* MetaGraphDef_MetaInfoDef::unsafe_arena_release_meta_graph_version() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
return meta_graph_version_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
GetArenaNoVirtual());
}
void MetaGraphDef_MetaInfoDef::set_allocated_meta_graph_version(::std::string* meta_graph_version) {
meta_graph_version_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), meta_graph_version,
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
}
void MetaGraphDef_MetaInfoDef::unsafe_arena_set_allocated_meta_graph_version(
::std::string* meta_graph_version) {
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
meta_graph_version_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
meta_graph_version, GetArenaNoVirtual());
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.meta_graph_version)
}
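// The singular message-field accessors below follow the usual generated
// ownership contract: has_* reports a non-NULL pointer, mutable_* lazily
// allocates (on the arena when one is attached), release_* transfers
// ownership to the caller (copying off the arena first via the _slow_ path),
// and set_allocated_* takes ownership of the passed pointer.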
// .tensorflow.OpList stripped_op_list = 2;
bool MetaGraphDef_MetaInfoDef::has_stripped_op_list() const {
return this != internal_default_instance() && stripped_op_list_ != NULL;
}
void MetaGraphDef_MetaInfoDef::clear_stripped_op_list() {
if (GetArenaNoVirtual() == NULL && stripped_op_list_ != NULL) delete stripped_op_list_;
stripped_op_list_ = NULL;
}
const ::tensorflow::OpList& MetaGraphDef_MetaInfoDef::stripped_op_list() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.MetaInfoDef.stripped_op_list)
return stripped_op_list_ != NULL ? *stripped_op_list_
: *::tensorflow::OpList::internal_default_instance();
}
::tensorflow::OpList* MetaGraphDef_MetaInfoDef::mutable_stripped_op_list() {
if (stripped_op_list_ == NULL) {
_slow_mutable_stripped_op_list();
}
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.MetaInfoDef.stripped_op_list)
return stripped_op_list_;
}
::tensorflow::OpList* MetaGraphDef_MetaInfoDef::release_stripped_op_list() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.MetaInfoDef.stripped_op_list)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_stripped_op_list();
} else {
::tensorflow::OpList* temp = stripped_op_list_;
stripped_op_list_ = NULL;
return temp;
}
}
void MetaGraphDef_MetaInfoDef::set_allocated_stripped_op_list(::tensorflow::OpList* stripped_op_list) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete stripped_op_list_;
}
if (stripped_op_list != NULL) {
_slow_set_allocated_stripped_op_list(message_arena, &stripped_op_list);
}
stripped_op_list_ = stripped_op_list;
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.stripped_op_list)
}
// .google.protobuf.Any any_info = 3;
bool MetaGraphDef_MetaInfoDef::has_any_info() const {
return this != internal_default_instance() && any_info_ != NULL;
}
void MetaGraphDef_MetaInfoDef::clear_any_info() {
if (GetArenaNoVirtual() == NULL && any_info_ != NULL) delete any_info_;
any_info_ = NULL;
}
const ::google::protobuf::Any& MetaGraphDef_MetaInfoDef::any_info() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.MetaInfoDef.any_info)
return any_info_ != NULL ? *any_info_
: *::google::protobuf::Any::internal_default_instance();
}
::google::protobuf::Any* MetaGraphDef_MetaInfoDef::mutable_any_info() {
if (any_info_ == NULL) {
_slow_mutable_any_info();
}
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.MetaInfoDef.any_info)
return any_info_;
}
::google::protobuf::Any* MetaGraphDef_MetaInfoDef::release_any_info() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.MetaInfoDef.any_info)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_any_info();
} else {
::google::protobuf::Any* temp = any_info_;
any_info_ = NULL;
return temp;
}
}
void MetaGraphDef_MetaInfoDef::set_allocated_any_info(::google::protobuf::Any* any_info) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete any_info_;
}
if (any_info != NULL) {
if (message_arena != NULL) {
message_arena->Own(any_info);
}
}
any_info_ = any_info;
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.any_info)
}
// repeated string tags = 4;
int MetaGraphDef_MetaInfoDef::tags_size() const {
return tags_.size();
}
void MetaGraphDef_MetaInfoDef::clear_tags() {
tags_.Clear();
}
const ::std::string& MetaGraphDef_MetaInfoDef::tags(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.MetaInfoDef.tags)
return tags_.Get(index);
}
::std::string* MetaGraphDef_MetaInfoDef::mutable_tags(int index) {
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.MetaInfoDef.tags)
return tags_.Mutable(index);
}
void MetaGraphDef_MetaInfoDef::set_tags(int index, const ::std::string& value) {
// @@protoc_insertion_point(field_set:tensorflow.MetaGraphDef.MetaInfoDef.tags)
tags_.Mutable(index)->assign(value);
}
#if LANG_CXX11
void MetaGraphDef_MetaInfoDef::set_tags(int index, ::std::string&& value) {
// @@protoc_insertion_point(field_set:tensorflow.MetaGraphDef.MetaInfoDef.tags)
tags_.Mutable(index)->assign(std::move(value));
}
#endif
void MetaGraphDef_MetaInfoDef::set_tags(int index, const char* value) {
tags_.Mutable(index)->assign(value);
// @@protoc_insertion_point(field_set_char:tensorflow.MetaGraphDef.MetaInfoDef.tags)
}
void MetaGraphDef_MetaInfoDef::set_tags(int index, const char* value, size_t size) {
tags_.Mutable(index)->assign(
reinterpret_cast<const char*>(value), size);
// @@protoc_insertion_point(field_set_pointer:tensorflow.MetaGraphDef.MetaInfoDef.tags)
}
::std::string* MetaGraphDef_MetaInfoDef::add_tags() {
// @@protoc_insertion_point(field_add_mutable:tensorflow.MetaGraphDef.MetaInfoDef.tags)
return tags_.Add();
}
void MetaGraphDef_MetaInfoDef::add_tags(const ::std::string& value) {
tags_.Add()->assign(value);
// @@protoc_insertion_point(field_add:tensorflow.MetaGraphDef.MetaInfoDef.tags)
}
#if LANG_CXX11
void MetaGraphDef_MetaInfoDef::add_tags(::std::string&& value) {
tags_.Add()->assign(std::move(value));
// @@protoc_insertion_point(field_add:tensorflow.MetaGraphDef.MetaInfoDef.tags)
}
#endif
void MetaGraphDef_MetaInfoDef::add_tags(const char* value) {
tags_.Add()->assign(value);
// @@protoc_insertion_point(field_add_char:tensorflow.MetaGraphDef.MetaInfoDef.tags)
}
void MetaGraphDef_MetaInfoDef::add_tags(const char* value, size_t size) {
tags_.Add()->assign(reinterpret_cast<const char*>(value), size);
// @@protoc_insertion_point(field_add_pointer:tensorflow.MetaGraphDef.MetaInfoDef.tags)
}
const ::google::protobuf::RepeatedPtrField< ::std::string>&
MetaGraphDef_MetaInfoDef::tags() const {
// @@protoc_insertion_point(field_list:tensorflow.MetaGraphDef.MetaInfoDef.tags)
return tags_;
}
::google::protobuf::RepeatedPtrField< ::std::string>*
MetaGraphDef_MetaInfoDef::mutable_tags() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.MetaGraphDef.MetaInfoDef.tags)
return &tags_;
}
// string tensorflow_version = 5;
void MetaGraphDef_MetaInfoDef::clear_tensorflow_version() {
tensorflow_version_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
const ::std::string& MetaGraphDef_MetaInfoDef::tensorflow_version() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
return tensorflow_version_.Get();
}
void MetaGraphDef_MetaInfoDef::set_tensorflow_version(const ::std::string& value) {
tensorflow_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
// @@protoc_insertion_point(field_set:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
}
void MetaGraphDef_MetaInfoDef::set_tensorflow_version(const char* value) {
tensorflow_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_char:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
}
void MetaGraphDef_MetaInfoDef::set_tensorflow_version(const char* value,
size_t size) {
tensorflow_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_pointer:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
}
::std::string* MetaGraphDef_MetaInfoDef::mutable_tensorflow_version() {
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
return tensorflow_version_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* MetaGraphDef_MetaInfoDef::release_tensorflow_version() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
return tensorflow_version_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* MetaGraphDef_MetaInfoDef::unsafe_arena_release_tensorflow_version() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
return tensorflow_version_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
GetArenaNoVirtual());
}
void MetaGraphDef_MetaInfoDef::set_allocated_tensorflow_version(::std::string* tensorflow_version) {
tensorflow_version_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), tensorflow_version,
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
}
void MetaGraphDef_MetaInfoDef::unsafe_arena_set_allocated_tensorflow_version(
::std::string* tensorflow_version) {
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
tensorflow_version_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
tensorflow_version, GetArenaNoVirtual());
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_version)
}
// string tensorflow_git_version = 6;
void MetaGraphDef_MetaInfoDef::clear_tensorflow_git_version() {
tensorflow_git_version_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
const ::std::string& MetaGraphDef_MetaInfoDef::tensorflow_git_version() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
return tensorflow_git_version_.Get();
}
void MetaGraphDef_MetaInfoDef::set_tensorflow_git_version(const ::std::string& value) {
tensorflow_git_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
// @@protoc_insertion_point(field_set:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
}
void MetaGraphDef_MetaInfoDef::set_tensorflow_git_version(const char* value) {
tensorflow_git_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_char:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
}
void MetaGraphDef_MetaInfoDef::set_tensorflow_git_version(const char* value,
size_t size) {
tensorflow_git_version_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_pointer:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
}
::std::string* MetaGraphDef_MetaInfoDef::mutable_tensorflow_git_version() {
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
return tensorflow_git_version_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* MetaGraphDef_MetaInfoDef::release_tensorflow_git_version() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
return tensorflow_git_version_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* MetaGraphDef_MetaInfoDef::unsafe_arena_release_tensorflow_git_version() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
return tensorflow_git_version_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
GetArenaNoVirtual());
}
void MetaGraphDef_MetaInfoDef::set_allocated_tensorflow_git_version(::std::string* tensorflow_git_version) {
tensorflow_git_version_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), tensorflow_git_version,
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
}
void MetaGraphDef_MetaInfoDef::unsafe_arena_set_allocated_tensorflow_git_version(
::std::string* tensorflow_git_version) {
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
tensorflow_git_version_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
tensorflow_git_version, GetArenaNoVirtual());
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.MetaInfoDef.tensorflow_git_version)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
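// Illustrative use of the MetaInfoDef accessors defined above (a sketch for
// readers, not part of the generated API; it assumes the matching generated
// header tensorflow/core/protobuf/meta_graph.pb.h is included):
//
//   tensorflow::MetaGraphDef_MetaInfoDef info;
//   info.set_meta_graph_version("v1.0");
//   info.add_tags("serve");
//   info.add_tags("gpu");
//   std::string wire_bytes;
//   info.SerializeToString(&wire_bytes);    // ByteSizeLong() + SerializeWithCachedSizes()
//   tensorflow::MetaGraphDef_MetaInfoDef round_trip;
//   round_trip.ParseFromString(wire_bytes); // drives MergePartialFromCodedStream()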
// ===================================================================
#if PROTOBUF_INLINE_NOT_IN_HEADERS
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if PROTOBUF_INLINE_NOT_IN_HEADERS
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
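// MetaGraphDef implementation. The _slow_* helpers below are the
// out-of-line arena paths for its singular message fields (meta_info_def,
// graph_def, saver_def): arena allocation in _slow_mutable_*, copy-off-arena
// in _slow_release_*, and ownership transfer in _slow_set_allocated_*.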
void MetaGraphDef::_slow_mutable_meta_info_def() {
meta_info_def_ = ::google::protobuf::Arena::CreateMessage< ::tensorflow::MetaGraphDef_MetaInfoDef >(
GetArenaNoVirtual());
}
::tensorflow::MetaGraphDef_MetaInfoDef* MetaGraphDef::_slow_release_meta_info_def() {
if (meta_info_def_ == NULL) {
return NULL;
} else {
::tensorflow::MetaGraphDef_MetaInfoDef* temp = new ::tensorflow::MetaGraphDef_MetaInfoDef(*meta_info_def_);
meta_info_def_ = NULL;
return temp;
}
}
::tensorflow::MetaGraphDef_MetaInfoDef* MetaGraphDef::unsafe_arena_release_meta_info_def() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.meta_info_def)
::tensorflow::MetaGraphDef_MetaInfoDef* temp = meta_info_def_;
meta_info_def_ = NULL;
return temp;
}
void MetaGraphDef::_slow_set_allocated_meta_info_def(
::google::protobuf::Arena* message_arena, ::tensorflow::MetaGraphDef_MetaInfoDef** meta_info_def) {
if (message_arena != NULL &&
::google::protobuf::Arena::GetArena(*meta_info_def) == NULL) {
message_arena->Own(*meta_info_def);
} else if (message_arena !=
::google::protobuf::Arena::GetArena(*meta_info_def)) {
::tensorflow::MetaGraphDef_MetaInfoDef* new_meta_info_def =
::google::protobuf::Arena::CreateMessage< ::tensorflow::MetaGraphDef_MetaInfoDef >(
message_arena);
new_meta_info_def->CopyFrom(**meta_info_def);
*meta_info_def = new_meta_info_def;
}
}
void MetaGraphDef::unsafe_arena_set_allocated_meta_info_def(
::tensorflow::MetaGraphDef_MetaInfoDef* meta_info_def) {
if (GetArenaNoVirtual() == NULL) {
delete meta_info_def_;
}
meta_info_def_ = meta_info_def;
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.meta_info_def)
}
void MetaGraphDef::_slow_mutable_graph_def() {
graph_def_ = ::google::protobuf::Arena::CreateMessage< ::tensorflow::GraphDef >(
GetArenaNoVirtual());
}
::tensorflow::GraphDef* MetaGraphDef::_slow_release_graph_def() {
if (graph_def_ == NULL) {
return NULL;
} else {
::tensorflow::GraphDef* temp = new ::tensorflow::GraphDef(*graph_def_);
graph_def_ = NULL;
return temp;
}
}
::tensorflow::GraphDef* MetaGraphDef::unsafe_arena_release_graph_def() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.graph_def)
::tensorflow::GraphDef* temp = graph_def_;
graph_def_ = NULL;
return temp;
}
void MetaGraphDef::_slow_set_allocated_graph_def(
::google::protobuf::Arena* message_arena, ::tensorflow::GraphDef** graph_def) {
if (message_arena != NULL &&
::google::protobuf::Arena::GetArena(*graph_def) == NULL) {
message_arena->Own(*graph_def);
} else if (message_arena !=
::google::protobuf::Arena::GetArena(*graph_def)) {
::tensorflow::GraphDef* new_graph_def =
::google::protobuf::Arena::CreateMessage< ::tensorflow::GraphDef >(
message_arena);
new_graph_def->CopyFrom(**graph_def);
*graph_def = new_graph_def;
}
}
void MetaGraphDef::unsafe_arena_set_allocated_graph_def(
::tensorflow::GraphDef* graph_def) {
if (GetArenaNoVirtual() == NULL) {
delete graph_def_;
}
graph_def_ = graph_def;
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.graph_def)
}
void MetaGraphDef::_slow_mutable_saver_def() {
saver_def_ = ::google::protobuf::Arena::CreateMessage< ::tensorflow::SaverDef >(
GetArenaNoVirtual());
}
::tensorflow::SaverDef* MetaGraphDef::_slow_release_saver_def() {
if (saver_def_ == NULL) {
return NULL;
} else {
::tensorflow::SaverDef* temp = new ::tensorflow::SaverDef(*saver_def_);
saver_def_ = NULL;
return temp;
}
}
::tensorflow::SaverDef* MetaGraphDef::unsafe_arena_release_saver_def() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.MetaGraphDef.saver_def)
::tensorflow::SaverDef* temp = saver_def_;
saver_def_ = NULL;
return temp;
}
void MetaGraphDef::_slow_set_allocated_saver_def(
::google::protobuf::Arena* message_arena, ::tensorflow::SaverDef** saver_def) {
if (message_arena != NULL &&
::google::protobuf::Arena::GetArena(*saver_def) == NULL) {
message_arena->Own(*saver_def);
} else if (message_arena !=
::google::protobuf::Arena::GetArena(*saver_def)) {
::tensorflow::SaverDef* new_saver_def =
::google::protobuf::Arena::CreateMessage< ::tensorflow::SaverDef >(
message_arena);
new_saver_def->CopyFrom(**saver_def);
*saver_def = new_saver_def;
}
}
void MetaGraphDef::unsafe_arena_set_allocated_saver_def(
::tensorflow::SaverDef* saver_def) {
if (GetArenaNoVirtual() == NULL) {
delete saver_def_;
}
saver_def_ = saver_def;
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.MetaGraphDef.saver_def)
}
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int MetaGraphDef::kMetaInfoDefFieldNumber;
const int MetaGraphDef::kGraphDefFieldNumber;
const int MetaGraphDef::kSaverDefFieldNumber;
const int MetaGraphDef::kCollectionDefFieldNumber;
const int MetaGraphDef::kSignatureDefFieldNumber;
const int MetaGraphDef::kAssetFileDefFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
MetaGraphDef::MetaGraphDef()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.MetaGraphDef)
}
MetaGraphDef::MetaGraphDef(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
collection_def_(arena),
signature_def_(arena),
asset_file_def_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.MetaGraphDef)
}
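// The map fields collection_def_ and signature_def_ resolve their MapEntry
// descriptors lazily: the constructors install protobuf_AssignDescriptorsOnce
// as a callback together with a pointer into file_level_metadata that
// descriptor assignment fills in.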
MetaGraphDef::MetaGraphDef(const MetaGraphDef& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
asset_file_def_(from.asset_file_def_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
const ::google::protobuf::Descriptor*& MetaGraphDef_CollectionDefEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[1].descriptor;
const ::google::protobuf::Descriptor*& MetaGraphDef_SignatureDefEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[2].descriptor;
collection_def_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
collection_def_.SetEntryDescriptor(
&MetaGraphDef_CollectionDefEntry_descriptor);
collection_def_.MergeFrom(from.collection_def_);
signature_def_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
signature_def_.SetEntryDescriptor(
&MetaGraphDef_SignatureDefEntry_descriptor);
signature_def_.MergeFrom(from.signature_def_);
if (from.has_meta_info_def()) {
meta_info_def_ = new ::tensorflow::MetaGraphDef_MetaInfoDef(*from.meta_info_def_);
} else {
meta_info_def_ = NULL;
}
if (from.has_graph_def()) {
graph_def_ = new ::tensorflow::GraphDef(*from.graph_def_);
} else {
graph_def_ = NULL;
}
if (from.has_saver_def()) {
saver_def_ = new ::tensorflow::SaverDef(*from.saver_def_);
} else {
saver_def_ = NULL;
}
// @@protoc_insertion_point(copy_constructor:tensorflow.MetaGraphDef)
}
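// SharedCtor zeroes the submessage pointers (meta_info_def_ through
// saver_def_) with a single memset, which relies on those members being
// laid out contiguously in the class.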
void MetaGraphDef::SharedCtor() {
const ::google::protobuf::Descriptor*& MetaGraphDef_CollectionDefEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[1].descriptor;
const ::google::protobuf::Descriptor*& MetaGraphDef_SignatureDefEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[2].descriptor;
collection_def_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
collection_def_.SetEntryDescriptor(
&MetaGraphDef_CollectionDefEntry_descriptor);
signature_def_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
signature_def_.SetEntryDescriptor(
&MetaGraphDef_SignatureDefEntry_descriptor);
::memset(&meta_info_def_, 0, reinterpret_cast<char*>(&saver_def_) -
reinterpret_cast<char*>(&meta_info_def_) + sizeof(saver_def_));
_cached_size_ = 0;
}
MetaGraphDef::~MetaGraphDef() {
// @@protoc_insertion_point(destructor:tensorflow.MetaGraphDef)
SharedDtor();
}
void MetaGraphDef::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
if (this != internal_default_instance()) {
delete meta_info_def_;
}
if (this != internal_default_instance()) {
delete graph_def_;
}
if (this != internal_default_instance()) {
delete saver_def_;
}
}
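// ArenaDtor/RegisterArenaDtor are intentionally empty: every owned
// submessage is either arena-allocated (freed with the arena) or deleted
// in SharedDtor when no arena is in use.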
void MetaGraphDef::ArenaDtor(void* object) {
MetaGraphDef* _this = reinterpret_cast< MetaGraphDef* >(object);
(void)_this;
}
void MetaGraphDef::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void MetaGraphDef::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* MetaGraphDef::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[3].descriptor;
}
const MetaGraphDef& MetaGraphDef::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
MetaGraphDef* MetaGraphDef::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<MetaGraphDef>(arena);
}
void MetaGraphDef::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.MetaGraphDef)
collection_def_.Clear();
signature_def_.Clear();
asset_file_def_.Clear();
if (GetArenaNoVirtual() == NULL && meta_info_def_ != NULL) {
delete meta_info_def_;
}
meta_info_def_ = NULL;
if (GetArenaNoVirtual() == NULL && graph_def_ != NULL) {
delete graph_def_;
}
graph_def_ = NULL;
if (GetArenaNoVirtual() == NULL && saver_def_ != NULL) {
delete saver_def_;
}
saver_def_ = NULL;
}
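// Wire-format parser. Each case matches one field's tag byte; unknown
// fields are skipped, and a zero tag or end-group tag ends parsing
// successfully. Map entry keys are UTF-8 validated on parse.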
bool MetaGraphDef::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.MetaGraphDef)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// .tensorflow.MetaGraphDef.MetaInfoDef meta_info_def = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_meta_info_def()));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.GraphDef graph_def = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_graph_def()));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.SaverDef saver_def = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_saver_def()));
} else {
goto handle_unusual;
}
break;
}
// map<string, .tensorflow.CollectionDef> collection_def = 4;
case 4: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(34u)) {
DO_(input->IncrementRecursionDepth());
MetaGraphDef_CollectionDefEntry::Parser< ::google::protobuf::internal::MapField<
::std::string, ::tensorflow::CollectionDef,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0 >,
::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef > > parser(&collection_def_);
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, &parser));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
parser.key().data(), parser.key().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.MetaGraphDef.CollectionDefEntry.key"));
} else {
goto handle_unusual;
}
input->UnsafeDecrementRecursionDepth();
break;
}
// map<string, .tensorflow.SignatureDef> signature_def = 5;
case 5: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(42u)) {
DO_(input->IncrementRecursionDepth());
MetaGraphDef_SignatureDefEntry::Parser< ::google::protobuf::internal::MapField<
::std::string, ::tensorflow::SignatureDef,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0 >,
::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef > > parser(&signature_def_);
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, &parser));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
parser.key().data(), parser.key().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.MetaGraphDef.SignatureDefEntry.key"));
} else {
goto handle_unusual;
}
input->UnsafeDecrementRecursionDepth();
break;
}
// repeated .tensorflow.AssetFileDef asset_file_def = 6;
case 6: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(50u)) {
DO_(input->IncrementRecursionDepth());
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtualNoRecursionDepth(
input, add_asset_file_def()));
} else {
goto handle_unusual;
}
input->UnsafeDecrementRecursionDepth();
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.MetaGraphDef)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.MetaGraphDef)
return false;
#undef DO_
}
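// Stream serialization. Map fields (collection_def, signature_def) are
// emitted in sorted key order when the output stream requests deterministic
// serialization; otherwise they follow map iteration order.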
void MetaGraphDef::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.MetaGraphDef)
// .tensorflow.MetaGraphDef.MetaInfoDef meta_info_def = 1;
if (this->has_meta_info_def()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, *this->meta_info_def_, output);
}
// .tensorflow.GraphDef graph_def = 2;
if (this->has_graph_def()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, *this->graph_def_, output);
}
// .tensorflow.SaverDef saver_def = 3;
if (this->has_saver_def()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
3, *this->saver_def_, output);
}
// map<string, .tensorflow.CollectionDef> collection_def = 4;
if (!this->collection_def().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.CollectionDefEntry.key");
}
};
if (output->IsSerializationDeterministic() &&
this->collection_def().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->collection_def().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_iterator
it = this->collection_def().begin();
it != this->collection_def().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<MetaGraphDef_CollectionDefEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(collection_def_.NewEntryWrapper(
items[i]->first, items[i]->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
4, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<MetaGraphDef_CollectionDefEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_iterator
it = this->collection_def().begin();
it != this->collection_def().end(); ++it) {
entry.reset(collection_def_.NewEntryWrapper(
it->first, it->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
4, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// map<string, .tensorflow.SignatureDef> signature_def = 5;
if (!this->signature_def().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.SignatureDefEntry.key");
}
};
if (output->IsSerializationDeterministic() &&
this->signature_def().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->signature_def().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_iterator
it = this->signature_def().begin();
it != this->signature_def().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<MetaGraphDef_SignatureDefEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(signature_def_.NewEntryWrapper(
items[i]->first, items[i]->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
5, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<MetaGraphDef_SignatureDefEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_iterator
it = this->signature_def().begin();
it != this->signature_def().end(); ++it) {
entry.reset(signature_def_.NewEntryWrapper(
it->first, it->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
5, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// repeated .tensorflow.AssetFileDef asset_file_def = 6;
for (unsigned int i = 0, n = this->asset_file_def_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
6, this->asset_file_def(i), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.MetaGraphDef)
}
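// Array serialization: same field logic as the stream path, but writes
// directly into the caller's buffer and returns the advanced pointer; the
// 'deterministic' flag plays the role of IsSerializationDeterministic().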
::google::protobuf::uint8* MetaGraphDef::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
  // 'deterministic' is consulted below when emitting the map fields.
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.MetaGraphDef)
// .tensorflow.MetaGraphDef.MetaInfoDef meta_info_def = 1;
if (this->has_meta_info_def()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
1, *this->meta_info_def_, false, target);
}
// .tensorflow.GraphDef graph_def = 2;
if (this->has_graph_def()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
2, *this->graph_def_, false, target);
}
// .tensorflow.SaverDef saver_def = 3;
if (this->has_saver_def()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
3, *this->saver_def_, false, target);
}
// map<string, .tensorflow.CollectionDef> collection_def = 4;
if (!this->collection_def().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.CollectionDefEntry.key");
}
};
if (deterministic &&
this->collection_def().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->collection_def().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_iterator
it = this->collection_def().begin();
it != this->collection_def().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<MetaGraphDef_CollectionDefEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(collection_def_.NewEntryWrapper(
items[i]->first, items[i]->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
4, *entry, deterministic, target);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<MetaGraphDef_CollectionDefEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_iterator
it = this->collection_def().begin();
it != this->collection_def().end(); ++it) {
entry.reset(collection_def_.NewEntryWrapper(
it->first, it->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
4, *entry, deterministic, target);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// map<string, .tensorflow.SignatureDef> signature_def = 5;
if (!this->signature_def().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.MetaGraphDef.SignatureDefEntry.key");
}
};
if (deterministic &&
this->signature_def().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->signature_def().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_iterator
it = this->signature_def().begin();
it != this->signature_def().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<MetaGraphDef_SignatureDefEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(signature_def_.NewEntryWrapper(
items[i]->first, items[i]->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
5, *entry, deterministic, target);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<MetaGraphDef_SignatureDefEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_iterator
it = this->signature_def().begin();
it != this->signature_def().end(); ++it) {
entry.reset(signature_def_.NewEntryWrapper(
it->first, it->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
5, *entry, deterministic, target);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// repeated .tensorflow.AssetFileDef asset_file_def = 6;
for (unsigned int i = 0, n = this->asset_file_def_size(); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
6, this->asset_file_def(i), false, target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.MetaGraphDef)
return target;
}
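// ByteSizeLong sums the encoded size of every present field and caches the
// result in _cached_size_ so the serializers can reuse it.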
size_t MetaGraphDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.MetaGraphDef)
size_t total_size = 0;
// map<string, .tensorflow.CollectionDef> collection_def = 4;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->collection_def_size());
{
::google::protobuf::scoped_ptr<MetaGraphDef_CollectionDefEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >::const_iterator
it = this->collection_def().begin();
it != this->collection_def().end(); ++it) {
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
entry.reset(collection_def_.NewEntryWrapper(it->first, it->second));
total_size += ::google::protobuf::internal::WireFormatLite::
MessageSizeNoVirtual(*entry);
}
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
}
// map<string, .tensorflow.SignatureDef> signature_def = 5;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->signature_def_size());
{
::google::protobuf::scoped_ptr<MetaGraphDef_SignatureDefEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >::const_iterator
it = this->signature_def().begin();
it != this->signature_def().end(); ++it) {
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
entry.reset(signature_def_.NewEntryWrapper(it->first, it->second));
total_size += ::google::protobuf::internal::WireFormatLite::
MessageSizeNoVirtual(*entry);
}
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
}
// repeated .tensorflow.AssetFileDef asset_file_def = 6;
{
unsigned int count = this->asset_file_def_size();
total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->asset_file_def(i));
}
}
// .tensorflow.MetaGraphDef.MetaInfoDef meta_info_def = 1;
if (this->has_meta_info_def()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->meta_info_def_);
}
// .tensorflow.GraphDef graph_def = 2;
if (this->has_graph_def()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->graph_def_);
}
// .tensorflow.SaverDef saver_def = 3;
if (this->has_saver_def()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->saver_def_);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
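// The generalized MergeFrom down-casts via DynamicCastToGenerated and falls
// back to reflection-based merging when handed an unrelated message type.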
void MetaGraphDef::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.MetaGraphDef)
GOOGLE_DCHECK_NE(&from, this);
const MetaGraphDef* source =
::google::protobuf::internal::DynamicCastToGenerated<const MetaGraphDef>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.MetaGraphDef)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.MetaGraphDef)
MergeFrom(*source);
}
}
void MetaGraphDef::MergeFrom(const MetaGraphDef& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.MetaGraphDef)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
collection_def_.MergeFrom(from.collection_def_);
signature_def_.MergeFrom(from.signature_def_);
asset_file_def_.MergeFrom(from.asset_file_def_);
if (from.has_meta_info_def()) {
mutable_meta_info_def()->::tensorflow::MetaGraphDef_MetaInfoDef::MergeFrom(from.meta_info_def());
}
if (from.has_graph_def()) {
mutable_graph_def()->::tensorflow::GraphDef::MergeFrom(from.graph_def());
}
if (from.has_saver_def()) {
mutable_saver_def()->::tensorflow::SaverDef::MergeFrom(from.saver_def());
}
}
void MetaGraphDef::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.MetaGraphDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void MetaGraphDef::CopyFrom(const MetaGraphDef& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.MetaGraphDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool MetaGraphDef::IsInitialized() const {
return true;
}
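// Swap is cheap (pointer and field swaps) when both messages share an
// arena; across arenas it round-trips through a temporary copy.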
void MetaGraphDef::Swap(MetaGraphDef* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
MetaGraphDef* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void MetaGraphDef::UnsafeArenaSwap(MetaGraphDef* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void MetaGraphDef::InternalSwap(MetaGraphDef* other) {
collection_def_.Swap(&other->collection_def_);
signature_def_.Swap(&other->signature_def_);
asset_file_def_.UnsafeArenaSwap(&other->asset_file_def_);
std::swap(meta_info_def_, other->meta_info_def_);
std::swap(graph_def_, other->graph_def_);
std::swap(saver_def_, other->saver_def_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata MetaGraphDef::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[3];
}
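// Out-of-line accessor definitions, compiled here only when
// PROTOBUF_INLINE_NOT_IN_HEADERS is set (otherwise they are inlined in the
// generated header).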
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// MetaGraphDef
// .tensorflow.MetaGraphDef.MetaInfoDef meta_info_def = 1;
bool MetaGraphDef::has_meta_info_def() const {
return this != internal_default_instance() && meta_info_def_ != NULL;
}
void MetaGraphDef::clear_meta_info_def() {
if (GetArenaNoVirtual() == NULL && meta_info_def_ != NULL) delete meta_info_def_;
meta_info_def_ = NULL;
}
const ::tensorflow::MetaGraphDef_MetaInfoDef& MetaGraphDef::meta_info_def() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.meta_info_def)
return meta_info_def_ != NULL ? *meta_info_def_
: *::tensorflow::MetaGraphDef_MetaInfoDef::internal_default_instance();
}
::tensorflow::MetaGraphDef_MetaInfoDef* MetaGraphDef::mutable_meta_info_def() {
if (meta_info_def_ == NULL) {
_slow_mutable_meta_info_def();
}
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.meta_info_def)
return meta_info_def_;
}
::tensorflow::MetaGraphDef_MetaInfoDef* MetaGraphDef::release_meta_info_def() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.meta_info_def)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_meta_info_def();
} else {
::tensorflow::MetaGraphDef_MetaInfoDef* temp = meta_info_def_;
meta_info_def_ = NULL;
return temp;
}
}
void MetaGraphDef::set_allocated_meta_info_def(::tensorflow::MetaGraphDef_MetaInfoDef* meta_info_def) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete meta_info_def_;
}
if (meta_info_def != NULL) {
_slow_set_allocated_meta_info_def(message_arena, &meta_info_def);
}
meta_info_def_ = meta_info_def;
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.meta_info_def)
}
// .tensorflow.GraphDef graph_def = 2;
bool MetaGraphDef::has_graph_def() const {
return this != internal_default_instance() && graph_def_ != NULL;
}
void MetaGraphDef::clear_graph_def() {
if (GetArenaNoVirtual() == NULL && graph_def_ != NULL) delete graph_def_;
graph_def_ = NULL;
}
const ::tensorflow::GraphDef& MetaGraphDef::graph_def() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.graph_def)
return graph_def_ != NULL ? *graph_def_
: *::tensorflow::GraphDef::internal_default_instance();
}
::tensorflow::GraphDef* MetaGraphDef::mutable_graph_def() {
if (graph_def_ == NULL) {
_slow_mutable_graph_def();
}
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.graph_def)
return graph_def_;
}
::tensorflow::GraphDef* MetaGraphDef::release_graph_def() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.graph_def)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_graph_def();
} else {
::tensorflow::GraphDef* temp = graph_def_;
graph_def_ = NULL;
return temp;
}
}
void MetaGraphDef::set_allocated_graph_def(::tensorflow::GraphDef* graph_def) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete graph_def_;
}
if (graph_def != NULL) {
_slow_set_allocated_graph_def(message_arena, &graph_def);
}
graph_def_ = graph_def;
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.graph_def)
}
// .tensorflow.SaverDef saver_def = 3;
bool MetaGraphDef::has_saver_def() const {
return this != internal_default_instance() && saver_def_ != NULL;
}
void MetaGraphDef::clear_saver_def() {
if (GetArenaNoVirtual() == NULL && saver_def_ != NULL) delete saver_def_;
saver_def_ = NULL;
}
const ::tensorflow::SaverDef& MetaGraphDef::saver_def() const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.saver_def)
return saver_def_ != NULL ? *saver_def_
: *::tensorflow::SaverDef::internal_default_instance();
}
::tensorflow::SaverDef* MetaGraphDef::mutable_saver_def() {
if (saver_def_ == NULL) {
_slow_mutable_saver_def();
}
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.saver_def)
return saver_def_;
}
::tensorflow::SaverDef* MetaGraphDef::release_saver_def() {
// @@protoc_insertion_point(field_release:tensorflow.MetaGraphDef.saver_def)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_saver_def();
} else {
::tensorflow::SaverDef* temp = saver_def_;
saver_def_ = NULL;
return temp;
}
}
void MetaGraphDef::set_allocated_saver_def(::tensorflow::SaverDef* saver_def) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete saver_def_;
}
if (saver_def != NULL) {
_slow_set_allocated_saver_def(message_arena, &saver_def);
}
saver_def_ = saver_def;
// @@protoc_insertion_point(field_set_allocated:tensorflow.MetaGraphDef.saver_def)
}
// map<string, .tensorflow.CollectionDef> collection_def = 4;
int MetaGraphDef::collection_def_size() const {
return collection_def_.size();
}
void MetaGraphDef::clear_collection_def() {
collection_def_.Clear();
}
const ::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >&
MetaGraphDef::collection_def() const {
// @@protoc_insertion_point(field_map:tensorflow.MetaGraphDef.collection_def)
return collection_def_.GetMap();
}
::google::protobuf::Map< ::std::string, ::tensorflow::CollectionDef >*
MetaGraphDef::mutable_collection_def() {
// @@protoc_insertion_point(field_mutable_map:tensorflow.MetaGraphDef.collection_def)
return collection_def_.MutableMap();
}
// map<string, .tensorflow.SignatureDef> signature_def = 5;
int MetaGraphDef::signature_def_size() const {
return signature_def_.size();
}
void MetaGraphDef::clear_signature_def() {
signature_def_.Clear();
}
const ::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >&
MetaGraphDef::signature_def() const {
// @@protoc_insertion_point(field_map:tensorflow.MetaGraphDef.signature_def)
return signature_def_.GetMap();
}
::google::protobuf::Map< ::std::string, ::tensorflow::SignatureDef >*
MetaGraphDef::mutable_signature_def() {
// @@protoc_insertion_point(field_mutable_map:tensorflow.MetaGraphDef.signature_def)
return signature_def_.MutableMap();
}
// repeated .tensorflow.AssetFileDef asset_file_def = 6;
int MetaGraphDef::asset_file_def_size() const {
return asset_file_def_.size();
}
void MetaGraphDef::clear_asset_file_def() {
asset_file_def_.Clear();
}
const ::tensorflow::AssetFileDef& MetaGraphDef::asset_file_def(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.MetaGraphDef.asset_file_def)
return asset_file_def_.Get(index);
}
::tensorflow::AssetFileDef* MetaGraphDef::mutable_asset_file_def(int index) {
// @@protoc_insertion_point(field_mutable:tensorflow.MetaGraphDef.asset_file_def)
return asset_file_def_.Mutable(index);
}
::tensorflow::AssetFileDef* MetaGraphDef::add_asset_file_def() {
// @@protoc_insertion_point(field_add:tensorflow.MetaGraphDef.asset_file_def)
return asset_file_def_.Add();
}
::google::protobuf::RepeatedPtrField< ::tensorflow::AssetFileDef >*
MetaGraphDef::mutable_asset_file_def() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.MetaGraphDef.asset_file_def)
return &asset_file_def_;
}
const ::google::protobuf::RepeatedPtrField< ::tensorflow::AssetFileDef >&
MetaGraphDef::asset_file_def() const {
// @@protoc_insertion_point(field_list:tensorflow.MetaGraphDef.asset_file_def)
return asset_file_def_;
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
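// CollectionDef.NodeList: holds `repeated string value = 1`, with UTF-8
// validation applied on both parse and serialize.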
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int CollectionDef_NodeList::kValueFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
CollectionDef_NodeList::CollectionDef_NodeList()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.CollectionDef.NodeList)
}
CollectionDef_NodeList::CollectionDef_NodeList(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
value_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.CollectionDef.NodeList)
}
CollectionDef_NodeList::CollectionDef_NodeList(const CollectionDef_NodeList& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
value_(from.value_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
// @@protoc_insertion_point(copy_constructor:tensorflow.CollectionDef.NodeList)
}
void CollectionDef_NodeList::SharedCtor() {
_cached_size_ = 0;
}
CollectionDef_NodeList::~CollectionDef_NodeList() {
// @@protoc_insertion_point(destructor:tensorflow.CollectionDef.NodeList)
SharedDtor();
}
void CollectionDef_NodeList::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
}
void CollectionDef_NodeList::ArenaDtor(void* object) {
CollectionDef_NodeList* _this = reinterpret_cast< CollectionDef_NodeList* >(object);
(void)_this;
}
void CollectionDef_NodeList::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void CollectionDef_NodeList::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CollectionDef_NodeList::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[4].descriptor;
}
const CollectionDef_NodeList& CollectionDef_NodeList::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
CollectionDef_NodeList* CollectionDef_NodeList::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<CollectionDef_NodeList>(arena);
}
void CollectionDef_NodeList::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.CollectionDef.NodeList)
value_.Clear();
}
bool CollectionDef_NodeList::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.CollectionDef.NodeList)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated string value = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->add_value()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->value(this->value_size() - 1).data(),
this->value(this->value_size() - 1).length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.CollectionDef.NodeList.value"));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.CollectionDef.NodeList)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.CollectionDef.NodeList)
return false;
#undef DO_
}
void CollectionDef_NodeList::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.CollectionDef.NodeList)
// repeated string value = 1;
for (int i = 0, n = this->value_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->value(i).data(), this->value(i).length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.CollectionDef.NodeList.value");
::google::protobuf::internal::WireFormatLite::WriteString(
1, this->value(i), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.CollectionDef.NodeList)
}
::google::protobuf::uint8* CollectionDef_NodeList::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.CollectionDef.NodeList)
// repeated string value = 1;
for (int i = 0, n = this->value_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->value(i).data(), this->value(i).length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.CollectionDef.NodeList.value");
target = ::google::protobuf::internal::WireFormatLite::
WriteStringToArray(1, this->value(i), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.CollectionDef.NodeList)
return target;
}
size_t CollectionDef_NodeList::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.CollectionDef.NodeList)
size_t total_size = 0;
// repeated string value = 1;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->value_size());
for (int i = 0, n = this->value_size(); i < n; i++) {
total_size += ::google::protobuf::internal::WireFormatLite::StringSize(
this->value(i));
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void CollectionDef_NodeList::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.CollectionDef.NodeList)
GOOGLE_DCHECK_NE(&from, this);
const CollectionDef_NodeList* source =
::google::protobuf::internal::DynamicCastToGenerated<const CollectionDef_NodeList>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.CollectionDef.NodeList)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.CollectionDef.NodeList)
MergeFrom(*source);
}
}
void CollectionDef_NodeList::MergeFrom(const CollectionDef_NodeList& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.CollectionDef.NodeList)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
value_.MergeFrom(from.value_);
}
void CollectionDef_NodeList::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.CollectionDef.NodeList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CollectionDef_NodeList::CopyFrom(const CollectionDef_NodeList& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.CollectionDef.NodeList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool CollectionDef_NodeList::IsInitialized() const {
return true;
}
void CollectionDef_NodeList::Swap(CollectionDef_NodeList* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
CollectionDef_NodeList* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void CollectionDef_NodeList::UnsafeArenaSwap(CollectionDef_NodeList* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void CollectionDef_NodeList::InternalSwap(CollectionDef_NodeList* other) {
value_.UnsafeArenaSwap(&other->value_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata CollectionDef_NodeList::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[4];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// CollectionDef_NodeList
// repeated string value = 1;
int CollectionDef_NodeList::value_size() const {
return value_.size();
}
void CollectionDef_NodeList::clear_value() {
value_.Clear();
}
const ::std::string& CollectionDef_NodeList::value(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.NodeList.value)
return value_.Get(index);
}
::std::string* CollectionDef_NodeList::mutable_value(int index) {
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.NodeList.value)
return value_.Mutable(index);
}
void CollectionDef_NodeList::set_value(int index, const ::std::string& value) {
// @@protoc_insertion_point(field_set:tensorflow.CollectionDef.NodeList.value)
value_.Mutable(index)->assign(value);
}
#if LANG_CXX11
void CollectionDef_NodeList::set_value(int index, ::std::string&& value) {
// @@protoc_insertion_point(field_set:tensorflow.CollectionDef.NodeList.value)
value_.Mutable(index)->assign(std::move(value));
}
#endif
void CollectionDef_NodeList::set_value(int index, const char* value) {
value_.Mutable(index)->assign(value);
// @@protoc_insertion_point(field_set_char:tensorflow.CollectionDef.NodeList.value)
}
void CollectionDef_NodeList::set_value(int index, const char* value, size_t size) {
  value_.Mutable(index)->assign(value, size);
// @@protoc_insertion_point(field_set_pointer:tensorflow.CollectionDef.NodeList.value)
}
::std::string* CollectionDef_NodeList::add_value() {
// @@protoc_insertion_point(field_add_mutable:tensorflow.CollectionDef.NodeList.value)
return value_.Add();
}
void CollectionDef_NodeList::add_value(const ::std::string& value) {
value_.Add()->assign(value);
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.NodeList.value)
}
#if LANG_CXX11
void CollectionDef_NodeList::add_value(::std::string&& value) {
value_.Add()->assign(std::move(value));
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.NodeList.value)
}
#endif
void CollectionDef_NodeList::add_value(const char* value) {
value_.Add()->assign(value);
// @@protoc_insertion_point(field_add_char:tensorflow.CollectionDef.NodeList.value)
}
void CollectionDef_NodeList::add_value(const char* value, size_t size) {
  value_.Add()->assign(value, size);
// @@protoc_insertion_point(field_add_pointer:tensorflow.CollectionDef.NodeList.value)
}
const ::google::protobuf::RepeatedPtrField< ::std::string>&
CollectionDef_NodeList::value() const {
// @@protoc_insertion_point(field_list:tensorflow.CollectionDef.NodeList.value)
return value_;
}
::google::protobuf::RepeatedPtrField< ::std::string>*
CollectionDef_NodeList::mutable_value() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.CollectionDef.NodeList.value)
return &value_;
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
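// CollectionDef.BytesList: like NodeList but with `repeated bytes value`,
// so no UTF-8 validation is performed.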
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int CollectionDef_BytesList::kValueFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
CollectionDef_BytesList::CollectionDef_BytesList()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.CollectionDef.BytesList)
}
CollectionDef_BytesList::CollectionDef_BytesList(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
value_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.CollectionDef.BytesList)
}
CollectionDef_BytesList::CollectionDef_BytesList(const CollectionDef_BytesList& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
value_(from.value_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
// @@protoc_insertion_point(copy_constructor:tensorflow.CollectionDef.BytesList)
}
void CollectionDef_BytesList::SharedCtor() {
_cached_size_ = 0;
}
CollectionDef_BytesList::~CollectionDef_BytesList() {
// @@protoc_insertion_point(destructor:tensorflow.CollectionDef.BytesList)
SharedDtor();
}
void CollectionDef_BytesList::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
}
void CollectionDef_BytesList::ArenaDtor(void* object) {
CollectionDef_BytesList* _this = reinterpret_cast< CollectionDef_BytesList* >(object);
(void)_this;
}
void CollectionDef_BytesList::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void CollectionDef_BytesList::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CollectionDef_BytesList::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[5].descriptor;
}
const CollectionDef_BytesList& CollectionDef_BytesList::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
CollectionDef_BytesList* CollectionDef_BytesList::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<CollectionDef_BytesList>(arena);
}
void CollectionDef_BytesList::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.CollectionDef.BytesList)
value_.Clear();
}
bool CollectionDef_BytesList::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.CollectionDef.BytesList)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated bytes value = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadBytes(
input, this->add_value()));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.CollectionDef.BytesList)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.CollectionDef.BytesList)
return false;
#undef DO_
}
void CollectionDef_BytesList::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.CollectionDef.BytesList)
// repeated bytes value = 1;
for (int i = 0, n = this->value_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteBytes(
1, this->value(i), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.CollectionDef.BytesList)
}
::google::protobuf::uint8* CollectionDef_BytesList::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.CollectionDef.BytesList)
// repeated bytes value = 1;
for (int i = 0, n = this->value_size(); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
WriteBytesToArray(1, this->value(i), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.CollectionDef.BytesList)
return target;
}
size_t CollectionDef_BytesList::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.CollectionDef.BytesList)
size_t total_size = 0;
// repeated bytes value = 1;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->value_size());
for (int i = 0, n = this->value_size(); i < n; i++) {
total_size += ::google::protobuf::internal::WireFormatLite::BytesSize(
this->value(i));
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void CollectionDef_BytesList::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.CollectionDef.BytesList)
GOOGLE_DCHECK_NE(&from, this);
const CollectionDef_BytesList* source =
::google::protobuf::internal::DynamicCastToGenerated<const CollectionDef_BytesList>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.CollectionDef.BytesList)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.CollectionDef.BytesList)
MergeFrom(*source);
}
}
void CollectionDef_BytesList::MergeFrom(const CollectionDef_BytesList& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.CollectionDef.BytesList)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
value_.MergeFrom(from.value_);
}
void CollectionDef_BytesList::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.CollectionDef.BytesList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CollectionDef_BytesList::CopyFrom(const CollectionDef_BytesList& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.CollectionDef.BytesList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool CollectionDef_BytesList::IsInitialized() const {
return true;
}
void CollectionDef_BytesList::Swap(CollectionDef_BytesList* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
CollectionDef_BytesList* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void CollectionDef_BytesList::UnsafeArenaSwap(CollectionDef_BytesList* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void CollectionDef_BytesList::InternalSwap(CollectionDef_BytesList* other) {
value_.UnsafeArenaSwap(&other->value_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata CollectionDef_BytesList::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[5];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// CollectionDef_BytesList
// repeated bytes value = 1;
int CollectionDef_BytesList::value_size() const {
return value_.size();
}
void CollectionDef_BytesList::clear_value() {
value_.Clear();
}
const ::std::string& CollectionDef_BytesList::value(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.BytesList.value)
return value_.Get(index);
}
::std::string* CollectionDef_BytesList::mutable_value(int index) {
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.BytesList.value)
return value_.Mutable(index);
}
void CollectionDef_BytesList::set_value(int index, const ::std::string& value) {
// @@protoc_insertion_point(field_set:tensorflow.CollectionDef.BytesList.value)
value_.Mutable(index)->assign(value);
}
#if LANG_CXX11
void CollectionDef_BytesList::set_value(int index, ::std::string&& value) {
// @@protoc_insertion_point(field_set:tensorflow.CollectionDef.BytesList.value)
value_.Mutable(index)->assign(std::move(value));
}
#endif
void CollectionDef_BytesList::set_value(int index, const char* value) {
value_.Mutable(index)->assign(value);
// @@protoc_insertion_point(field_set_char:tensorflow.CollectionDef.BytesList.value)
}
void CollectionDef_BytesList::set_value(int index, const void* value, size_t size) {
value_.Mutable(index)->assign(
reinterpret_cast<const char*>(value), size);
// @@protoc_insertion_point(field_set_pointer:tensorflow.CollectionDef.BytesList.value)
}
::std::string* CollectionDef_BytesList::add_value() {
// @@protoc_insertion_point(field_add_mutable:tensorflow.CollectionDef.BytesList.value)
return value_.Add();
}
void CollectionDef_BytesList::add_value(const ::std::string& value) {
value_.Add()->assign(value);
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.BytesList.value)
}
#if LANG_CXX11
void CollectionDef_BytesList::add_value(::std::string&& value) {
value_.Add()->assign(std::move(value));
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.BytesList.value)
}
#endif
void CollectionDef_BytesList::add_value(const char* value) {
value_.Add()->assign(value);
// @@protoc_insertion_point(field_add_char:tensorflow.CollectionDef.BytesList.value)
}
void CollectionDef_BytesList::add_value(const void* value, size_t size) {
value_.Add()->assign(reinterpret_cast<const char*>(value), size);
// @@protoc_insertion_point(field_add_pointer:tensorflow.CollectionDef.BytesList.value)
}
const ::google::protobuf::RepeatedPtrField< ::std::string>&
CollectionDef_BytesList::value() const {
// @@protoc_insertion_point(field_list:tensorflow.CollectionDef.BytesList.value)
return value_;
}
::google::protobuf::RepeatedPtrField< ::std::string>*
CollectionDef_BytesList::mutable_value() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.CollectionDef.BytesList.value)
return &value_;
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
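// CollectionDef.Int64List: `repeated int64 value = 1 [packed = true]`. The
// parser accepts both packed (length-delimited) and unpacked (varint)
// encodings; serialization always emits the packed form.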
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int CollectionDef_Int64List::kValueFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
CollectionDef_Int64List::CollectionDef_Int64List()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.CollectionDef.Int64List)
}
CollectionDef_Int64List::CollectionDef_Int64List(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
value_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.CollectionDef.Int64List)
}
CollectionDef_Int64List::CollectionDef_Int64List(const CollectionDef_Int64List& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
value_(from.value_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
// @@protoc_insertion_point(copy_constructor:tensorflow.CollectionDef.Int64List)
}
void CollectionDef_Int64List::SharedCtor() {
_cached_size_ = 0;
}
CollectionDef_Int64List::~CollectionDef_Int64List() {
// @@protoc_insertion_point(destructor:tensorflow.CollectionDef.Int64List)
SharedDtor();
}
void CollectionDef_Int64List::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
}
void CollectionDef_Int64List::ArenaDtor(void* object) {
CollectionDef_Int64List* _this = reinterpret_cast< CollectionDef_Int64List* >(object);
(void)_this;
}
void CollectionDef_Int64List::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void CollectionDef_Int64List::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CollectionDef_Int64List::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[6].descriptor;
}
const CollectionDef_Int64List& CollectionDef_Int64List::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
CollectionDef_Int64List* CollectionDef_Int64List::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<CollectionDef_Int64List>(arena);
}
void CollectionDef_Int64List::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.CollectionDef.Int64List)
value_.Clear();
}
bool CollectionDef_Int64List::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.CollectionDef.Int64List)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated int64 value = 1 [packed = true];
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive<
::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
input, this->mutable_value())));
} else if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(8u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline<
::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
1, 10u, input, this->mutable_value())));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.CollectionDef.Int64List)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.CollectionDef.Int64List)
return false;
#undef DO_
}
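// Serialization always emits the packed form; the length prefix is taken from
// _value_cached_byte_size_, which is filled in by the preceding ByteSizeLong()
// pass.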
void CollectionDef_Int64List::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.CollectionDef.Int64List)
// repeated int64 value = 1 [packed = true];
if (this->value_size() > 0) {
::google::protobuf::internal::WireFormatLite::WriteTag(1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output);
output->WriteVarint32(_value_cached_byte_size_);
}
for (int i = 0, n = this->value_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteInt64NoTag(
this->value(i), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.CollectionDef.Int64List)
}
::google::protobuf::uint8* CollectionDef_Int64List::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.CollectionDef.Int64List)
// repeated int64 value = 1 [packed = true];
if (this->value_size() > 0) {
target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray(
1,
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,
target);
target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray(
_value_cached_byte_size_, target);
}
for (int i = 0, n = this->value_size(); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
WriteInt64NoTagToArray(this->value(i), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.CollectionDef.Int64List)
return target;
}
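// ByteSizeLong() returns the total size and also caches the intermediate
// sizes (_value_cached_byte_size_ and _cached_size_) under the
// GOOGLE_SAFE_CONCURRENT_WRITES guards so a following serialization pass can
// reuse them without recomputation.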
size_t CollectionDef_Int64List::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.CollectionDef.Int64List)
size_t total_size = 0;
// repeated int64 value = 1 [packed = true];
{
size_t data_size = ::google::protobuf::internal::WireFormatLite::
Int64Size(this->value_);
if (data_size > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::Int32Size(data_size);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(data_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_value_cached_byte_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
total_size += data_size;
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void CollectionDef_Int64List::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.CollectionDef.Int64List)
GOOGLE_DCHECK_NE(&from, this);
const CollectionDef_Int64List* source =
::google::protobuf::internal::DynamicCastToGenerated<const CollectionDef_Int64List>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.CollectionDef.Int64List)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.CollectionDef.Int64List)
MergeFrom(*source);
}
}
void CollectionDef_Int64List::MergeFrom(const CollectionDef_Int64List& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.CollectionDef.Int64List)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
value_.MergeFrom(from.value_);
}
void CollectionDef_Int64List::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.CollectionDef.Int64List)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CollectionDef_Int64List::CopyFrom(const CollectionDef_Int64List& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.CollectionDef.Int64List)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool CollectionDef_Int64List::IsInitialized() const {
return true;
}
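// Swap() is O(1) pointer exchange when both messages share an arena; across
// different arenas (or arena vs. heap) the contents are exchanged through a
// temporary copy, since owned pointers cannot move between arenas.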
void CollectionDef_Int64List::Swap(CollectionDef_Int64List* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
CollectionDef_Int64List* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void CollectionDef_Int64List::UnsafeArenaSwap(CollectionDef_Int64List* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void CollectionDef_Int64List::InternalSwap(CollectionDef_Int64List* other) {
value_.UnsafeArenaSwap(&other->value_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata CollectionDef_Int64List::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[6];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// CollectionDef_Int64List
// repeated int64 value = 1 [packed = true];
int CollectionDef_Int64List::value_size() const {
return value_.size();
}
void CollectionDef_Int64List::clear_value() {
value_.Clear();
}
::google::protobuf::int64 CollectionDef_Int64List::value(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.Int64List.value)
return value_.Get(index);
}
void CollectionDef_Int64List::set_value(int index, ::google::protobuf::int64 value) {
value_.Set(index, value);
// @@protoc_insertion_point(field_set:tensorflow.CollectionDef.Int64List.value)
}
void CollectionDef_Int64List::add_value(::google::protobuf::int64 value) {
value_.Add(value);
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.Int64List.value)
}
const ::google::protobuf::RepeatedField< ::google::protobuf::int64 >&
CollectionDef_Int64List::value() const {
// @@protoc_insertion_point(field_list:tensorflow.CollectionDef.Int64List.value)
return value_;
}
::google::protobuf::RepeatedField< ::google::protobuf::int64 >*
CollectionDef_Int64List::mutable_value() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.CollectionDef.Int64List.value)
return &value_;
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int CollectionDef_FloatList::kValueFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
CollectionDef_FloatList::CollectionDef_FloatList()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.CollectionDef.FloatList)
}
CollectionDef_FloatList::CollectionDef_FloatList(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
value_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.CollectionDef.FloatList)
}
CollectionDef_FloatList::CollectionDef_FloatList(const CollectionDef_FloatList& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
value_(from.value_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
// @@protoc_insertion_point(copy_constructor:tensorflow.CollectionDef.FloatList)
}
void CollectionDef_FloatList::SharedCtor() {
_cached_size_ = 0;
}
CollectionDef_FloatList::~CollectionDef_FloatList() {
// @@protoc_insertion_point(destructor:tensorflow.CollectionDef.FloatList)
SharedDtor();
}
void CollectionDef_FloatList::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
}
void CollectionDef_FloatList::ArenaDtor(void* object) {
CollectionDef_FloatList* _this = reinterpret_cast< CollectionDef_FloatList* >(object);
(void)_this;
}
void CollectionDef_FloatList::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void CollectionDef_FloatList::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CollectionDef_FloatList::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[7].descriptor;
}
const CollectionDef_FloatList& CollectionDef_FloatList::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
CollectionDef_FloatList* CollectionDef_FloatList::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<CollectionDef_FloatList>(arena);
}
void CollectionDef_FloatList::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.CollectionDef.FloatList)
value_.Clear();
}
bool CollectionDef_FloatList::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.CollectionDef.FloatList)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated float value = 1 [packed = true];
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPackedPrimitive<
float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
input, this->mutable_value())));
} else if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(13u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline<
float, ::google::protobuf::internal::WireFormatLite::TYPE_FLOAT>(
1, 10u, input, this->mutable_value())));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.CollectionDef.FloatList)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.CollectionDef.FloatList)
return false;
#undef DO_
}
void CollectionDef_FloatList::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.CollectionDef.FloatList)
// repeated float value = 1 [packed = true];
if (this->value_size() > 0) {
::google::protobuf::internal::WireFormatLite::WriteTag(1, ::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output);
output->WriteVarint32(_value_cached_byte_size_);
::google::protobuf::internal::WireFormatLite::WriteFloatArray(
this->value().data(), this->value_size(), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.CollectionDef.FloatList)
}
::google::protobuf::uint8* CollectionDef_FloatList::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.CollectionDef.FloatList)
// repeated float value = 1 [packed = true];
if (this->value_size() > 0) {
target = ::google::protobuf::internal::WireFormatLite::WriteTagToArray(
1,
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED,
target);
target = ::google::protobuf::io::CodedOutputStream::WriteVarint32ToArray(
_value_cached_byte_size_, target);
}
for (int i = 0, n = this->value_size(); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
WriteFloatNoTagToArray(this->value(i), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.CollectionDef.FloatList)
return target;
}
size_t CollectionDef_FloatList::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.CollectionDef.FloatList)
size_t total_size = 0;
// repeated float value = 1 [packed = true];
{
unsigned int count = this->value_size();
size_t data_size = 4UL * count;
if (data_size > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::Int32Size(data_size);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(data_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_value_cached_byte_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
total_size += data_size;
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void CollectionDef_FloatList::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.CollectionDef.FloatList)
GOOGLE_DCHECK_NE(&from, this);
const CollectionDef_FloatList* source =
::google::protobuf::internal::DynamicCastToGenerated<const CollectionDef_FloatList>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.CollectionDef.FloatList)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.CollectionDef.FloatList)
MergeFrom(*source);
}
}
void CollectionDef_FloatList::MergeFrom(const CollectionDef_FloatList& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.CollectionDef.FloatList)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
value_.MergeFrom(from.value_);
}
void CollectionDef_FloatList::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.CollectionDef.FloatList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CollectionDef_FloatList::CopyFrom(const CollectionDef_FloatList& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.CollectionDef.FloatList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool CollectionDef_FloatList::IsInitialized() const {
return true;
}
void CollectionDef_FloatList::Swap(CollectionDef_FloatList* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
CollectionDef_FloatList* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void CollectionDef_FloatList::UnsafeArenaSwap(CollectionDef_FloatList* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void CollectionDef_FloatList::InternalSwap(CollectionDef_FloatList* other) {
value_.UnsafeArenaSwap(&other->value_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata CollectionDef_FloatList::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[7];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// CollectionDef_FloatList
// repeated float value = 1 [packed = true];
int CollectionDef_FloatList::value_size() const {
return value_.size();
}
void CollectionDef_FloatList::clear_value() {
value_.Clear();
}
float CollectionDef_FloatList::value(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.FloatList.value)
return value_.Get(index);
}
void CollectionDef_FloatList::set_value(int index, float value) {
value_.Set(index, value);
// @@protoc_insertion_point(field_set:tensorflow.CollectionDef.FloatList.value)
}
void CollectionDef_FloatList::add_value(float value) {
value_.Add(value);
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.FloatList.value)
}
const ::google::protobuf::RepeatedField< float >&
CollectionDef_FloatList::value() const {
// @@protoc_insertion_point(field_list:tensorflow.CollectionDef.FloatList.value)
return value_;
}
::google::protobuf::RepeatedField< float >*
CollectionDef_FloatList::mutable_value() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.CollectionDef.FloatList.value)
return &value_;
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int CollectionDef_AnyList::kValueFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
CollectionDef_AnyList::CollectionDef_AnyList()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.CollectionDef.AnyList)
}
CollectionDef_AnyList::CollectionDef_AnyList(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
value_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.CollectionDef.AnyList)
}
CollectionDef_AnyList::CollectionDef_AnyList(const CollectionDef_AnyList& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
value_(from.value_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
// @@protoc_insertion_point(copy_constructor:tensorflow.CollectionDef.AnyList)
}
void CollectionDef_AnyList::SharedCtor() {
_cached_size_ = 0;
}
CollectionDef_AnyList::~CollectionDef_AnyList() {
// @@protoc_insertion_point(destructor:tensorflow.CollectionDef.AnyList)
SharedDtor();
}
void CollectionDef_AnyList::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
}
void CollectionDef_AnyList::ArenaDtor(void* object) {
CollectionDef_AnyList* _this = reinterpret_cast< CollectionDef_AnyList* >(object);
(void)_this;
}
void CollectionDef_AnyList::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void CollectionDef_AnyList::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CollectionDef_AnyList::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[8].descriptor;
}
const CollectionDef_AnyList& CollectionDef_AnyList::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
CollectionDef_AnyList* CollectionDef_AnyList::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<CollectionDef_AnyList>(arena);
}
void CollectionDef_AnyList::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.CollectionDef.AnyList)
value_.Clear();
}
bool CollectionDef_AnyList::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.CollectionDef.AnyList)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated .google.protobuf.Any value = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(input->IncrementRecursionDepth());
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtualNoRecursionDepth(
input, add_value()));
} else {
goto handle_unusual;
}
input->UnsafeDecrementRecursionDepth();
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.CollectionDef.AnyList)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.CollectionDef.AnyList)
return false;
#undef DO_
}
void CollectionDef_AnyList::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.CollectionDef.AnyList)
// repeated .google.protobuf.Any value = 1;
for (unsigned int i = 0, n = this->value_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->value(i), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.CollectionDef.AnyList)
}
::google::protobuf::uint8* CollectionDef_AnyList::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.CollectionDef.AnyList)
// repeated .google.protobuf.Any value = 1;
for (unsigned int i = 0, n = this->value_size(); i < n; i++) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
1, this->value(i), false, target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.CollectionDef.AnyList)
return target;
}
size_t CollectionDef_AnyList::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.CollectionDef.AnyList)
size_t total_size = 0;
// repeated .google.protobuf.Any value = 1;
{
unsigned int count = this->value_size();
total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->value(i));
}
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void CollectionDef_AnyList::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.CollectionDef.AnyList)
GOOGLE_DCHECK_NE(&from, this);
const CollectionDef_AnyList* source =
::google::protobuf::internal::DynamicCastToGenerated<const CollectionDef_AnyList>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.CollectionDef.AnyList)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.CollectionDef.AnyList)
MergeFrom(*source);
}
}
void CollectionDef_AnyList::MergeFrom(const CollectionDef_AnyList& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.CollectionDef.AnyList)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
value_.MergeFrom(from.value_);
}
void CollectionDef_AnyList::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.CollectionDef.AnyList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CollectionDef_AnyList::CopyFrom(const CollectionDef_AnyList& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.CollectionDef.AnyList)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool CollectionDef_AnyList::IsInitialized() const {
return true;
}
void CollectionDef_AnyList::Swap(CollectionDef_AnyList* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
CollectionDef_AnyList* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void CollectionDef_AnyList::UnsafeArenaSwap(CollectionDef_AnyList* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void CollectionDef_AnyList::InternalSwap(CollectionDef_AnyList* other) {
value_.UnsafeArenaSwap(&other->value_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata CollectionDef_AnyList::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[8];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// CollectionDef_AnyList
// repeated .google.protobuf.Any value = 1;
int CollectionDef_AnyList::value_size() const {
return value_.size();
}
void CollectionDef_AnyList::clear_value() {
value_.Clear();
}
const ::google::protobuf::Any& CollectionDef_AnyList::value(int index) const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.AnyList.value)
return value_.Get(index);
}
::google::protobuf::Any* CollectionDef_AnyList::mutable_value(int index) {
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.AnyList.value)
return value_.Mutable(index);
}
::google::protobuf::Any* CollectionDef_AnyList::add_value() {
// @@protoc_insertion_point(field_add:tensorflow.CollectionDef.AnyList.value)
return value_.Add();
}
::google::protobuf::RepeatedPtrField< ::google::protobuf::Any >*
CollectionDef_AnyList::mutable_value() {
// @@protoc_insertion_point(field_mutable_list:tensorflow.CollectionDef.AnyList.value)
return &value_;
}
const ::google::protobuf::RepeatedPtrField< ::google::protobuf::Any >&
CollectionDef_AnyList::value() const {
// @@protoc_insertion_point(field_list:tensorflow.CollectionDef.AnyList.value)
return value_;
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int CollectionDef::kNodeListFieldNumber;
const int CollectionDef::kBytesListFieldNumber;
const int CollectionDef::kInt64ListFieldNumber;
const int CollectionDef::kFloatListFieldNumber;
const int CollectionDef::kAnyListFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
CollectionDef::CollectionDef()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.CollectionDef)
}
CollectionDef::CollectionDef(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.CollectionDef)
}
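// The copy constructor deep-copies only whichever member is active in the
// `kind` oneof; the inactive members are never allocated.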
CollectionDef::CollectionDef(const CollectionDef& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
clear_has_kind();
switch (from.kind_case()) {
case kNodeList: {
mutable_node_list()->::tensorflow::CollectionDef_NodeList::MergeFrom(from.node_list());
break;
}
case kBytesList: {
mutable_bytes_list()->::tensorflow::CollectionDef_BytesList::MergeFrom(from.bytes_list());
break;
}
case kInt64List: {
mutable_int64_list()->::tensorflow::CollectionDef_Int64List::MergeFrom(from.int64_list());
break;
}
case kFloatList: {
mutable_float_list()->::tensorflow::CollectionDef_FloatList::MergeFrom(from.float_list());
break;
}
case kAnyList: {
mutable_any_list()->::tensorflow::CollectionDef_AnyList::MergeFrom(from.any_list());
break;
}
case KIND_NOT_SET: {
break;
}
}
// @@protoc_insertion_point(copy_constructor:tensorflow.CollectionDef)
}
void CollectionDef::SharedCtor() {
clear_has_kind();
_cached_size_ = 0;
}
CollectionDef::~CollectionDef() {
// @@protoc_insertion_point(destructor:tensorflow.CollectionDef)
SharedDtor();
}
void CollectionDef::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
if (has_kind()) {
clear_kind();
}
}
void CollectionDef::ArenaDtor(void* object) {
CollectionDef* _this = reinterpret_cast< CollectionDef* >(object);
(void)_this;
}
void CollectionDef::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void CollectionDef::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CollectionDef::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[9].descriptor;
}
const CollectionDef& CollectionDef::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
CollectionDef* CollectionDef::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<CollectionDef>(arena);
}
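// clear_kind() deletes the active oneof member only when it was
// heap-allocated; arena-allocated members are left for the arena to reclaim.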
void CollectionDef::clear_kind() {
// @@protoc_insertion_point(one_of_clear_start:tensorflow.CollectionDef)
switch (kind_case()) {
case kNodeList: {
if (GetArenaNoVirtual() == NULL) {
delete kind_.node_list_;
}
break;
}
case kBytesList: {
if (GetArenaNoVirtual() == NULL) {
delete kind_.bytes_list_;
}
break;
}
case kInt64List: {
if (GetArenaNoVirtual() == NULL) {
delete kind_.int64_list_;
}
break;
}
case kFloatList: {
if (GetArenaNoVirtual() == NULL) {
delete kind_.float_list_;
}
break;
}
case kAnyList: {
if (GetArenaNoVirtual() == NULL) {
delete kind_.any_list_;
}
break;
}
case KIND_NOT_SET: {
break;
}
}
_oneof_case_[0] = KIND_NOT_SET;
}
void CollectionDef::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.CollectionDef)
clear_kind();
}
bool CollectionDef::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.CollectionDef)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// .tensorflow.CollectionDef.NodeList node_list = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_node_list()));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.CollectionDef.BytesList bytes_list = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_bytes_list()));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.CollectionDef.Int64List int64_list = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_int64_list()));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.CollectionDef.FloatList float_list = 4;
case 4: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(34u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_float_list()));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.CollectionDef.AnyList any_list = 5;
case 5: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(42u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_any_list()));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.CollectionDef)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.CollectionDef)
return false;
#undef DO_
}
void CollectionDef::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.CollectionDef)
// .tensorflow.CollectionDef.NodeList node_list = 1;
if (has_node_list()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, *kind_.node_list_, output);
}
// .tensorflow.CollectionDef.BytesList bytes_list = 2;
if (has_bytes_list()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, *kind_.bytes_list_, output);
}
// .tensorflow.CollectionDef.Int64List int64_list = 3;
if (has_int64_list()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
3, *kind_.int64_list_, output);
}
// .tensorflow.CollectionDef.FloatList float_list = 4;
if (has_float_list()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
4, *kind_.float_list_, output);
}
// .tensorflow.CollectionDef.AnyList any_list = 5;
if (has_any_list()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
5, *kind_.any_list_, output);
}
<|fim▁hole|> bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.CollectionDef)
// .tensorflow.CollectionDef.NodeList node_list = 1;
if (has_node_list()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
1, *kind_.node_list_, false, target);
}
// .tensorflow.CollectionDef.BytesList bytes_list = 2;
if (has_bytes_list()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
2, *kind_.bytes_list_, false, target);
}
// .tensorflow.CollectionDef.Int64List int64_list = 3;
if (has_int64_list()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
3, *kind_.int64_list_, false, target);
}
// .tensorflow.CollectionDef.FloatList float_list = 4;
if (has_float_list()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
4, *kind_.float_list_, false, target);
}
// .tensorflow.CollectionDef.AnyList any_list = 5;
if (has_any_list()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
5, *kind_.any_list_, false, target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.CollectionDef)
return target;
}
size_t CollectionDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.CollectionDef)
size_t total_size = 0;
switch (kind_case()) {
// .tensorflow.CollectionDef.NodeList node_list = 1;
case kNodeList: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*kind_.node_list_);
break;
}
// .tensorflow.CollectionDef.BytesList bytes_list = 2;
case kBytesList: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*kind_.bytes_list_);
break;
}
// .tensorflow.CollectionDef.Int64List int64_list = 3;
case kInt64List: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*kind_.int64_list_);
break;
}
// .tensorflow.CollectionDef.FloatList float_list = 4;
case kFloatList: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*kind_.float_list_);
break;
}
// .tensorflow.CollectionDef.AnyList any_list = 5;
case kAnyList: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*kind_.any_list_);
break;
}
case KIND_NOT_SET: {
break;
}
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void CollectionDef::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.CollectionDef)
GOOGLE_DCHECK_NE(&from, this);
const CollectionDef* source =
::google::protobuf::internal::DynamicCastToGenerated<const CollectionDef>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.CollectionDef)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.CollectionDef)
MergeFrom(*source);
}
}
void CollectionDef::MergeFrom(const CollectionDef& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.CollectionDef)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
switch (from.kind_case()) {
case kNodeList: {
mutable_node_list()->::tensorflow::CollectionDef_NodeList::MergeFrom(from.node_list());
break;
}
case kBytesList: {
mutable_bytes_list()->::tensorflow::CollectionDef_BytesList::MergeFrom(from.bytes_list());
break;
}
case kInt64List: {
mutable_int64_list()->::tensorflow::CollectionDef_Int64List::MergeFrom(from.int64_list());
break;
}
case kFloatList: {
mutable_float_list()->::tensorflow::CollectionDef_FloatList::MergeFrom(from.float_list());
break;
}
case kAnyList: {
mutable_any_list()->::tensorflow::CollectionDef_AnyList::MergeFrom(from.any_list());
break;
}
case KIND_NOT_SET: {
break;
}
}
}
void CollectionDef::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.CollectionDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CollectionDef::CopyFrom(const CollectionDef& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.CollectionDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool CollectionDef::IsInitialized() const {
return true;
}
void CollectionDef::Swap(CollectionDef* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
CollectionDef* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void CollectionDef::UnsafeArenaSwap(CollectionDef* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void CollectionDef::InternalSwap(CollectionDef* other) {
std::swap(kind_, other->kind_);
std::swap(_oneof_case_[0], other->_oneof_case_[0]);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata CollectionDef::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[9];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// CollectionDef
// .tensorflow.CollectionDef.NodeList node_list = 1;
bool CollectionDef::has_node_list() const {
return kind_case() == kNodeList;
}
void CollectionDef::set_has_node_list() {
_oneof_case_[0] = kNodeList;
}
void CollectionDef::clear_node_list() {
if (has_node_list()) {
if (GetArenaNoVirtual() == NULL) {
delete kind_.node_list_;
}
clear_has_kind();
}
}
const ::tensorflow::CollectionDef_NodeList& CollectionDef::node_list() const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.node_list)
return has_node_list()
? *kind_.node_list_
: ::tensorflow::CollectionDef_NodeList::default_instance();
}
::tensorflow::CollectionDef_NodeList* CollectionDef::mutable_node_list() {
if (!has_node_list()) {
clear_kind();
set_has_node_list();
kind_.node_list_ =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_NodeList >(
GetArenaNoVirtual());
}
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.node_list)
return kind_.node_list_;
}
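// release_node_list() on an arena-allocated message returns a heap copy,
// because ownership of arena memory cannot be transferred to the caller; the
// unsafe_arena_release_* variant further below hands back the raw pointer
// without copying.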
::tensorflow::CollectionDef_NodeList* CollectionDef::release_node_list() {
// @@protoc_insertion_point(field_release:tensorflow.CollectionDef.node_list)
if (has_node_list()) {
clear_has_kind();
if (GetArenaNoVirtual() != NULL) {
::tensorflow::CollectionDef_NodeList* temp = new ::tensorflow::CollectionDef_NodeList(*kind_.node_list_);
kind_.node_list_ = NULL;
return temp;
} else {
::tensorflow::CollectionDef_NodeList* temp = kind_.node_list_;
kind_.node_list_ = NULL;
return temp;
}
} else {
return NULL;
}
}
void CollectionDef::set_allocated_node_list(::tensorflow::CollectionDef_NodeList* node_list) {
clear_kind();
if (node_list) {
if (GetArenaNoVirtual() != NULL &&
::google::protobuf::Arena::GetArena(node_list) == NULL) {
GetArenaNoVirtual()->Own(node_list);
} else if (GetArenaNoVirtual() !=
::google::protobuf::Arena::GetArena(node_list)) {
::tensorflow::CollectionDef_NodeList* new_node_list =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_NodeList >(
GetArenaNoVirtual());
new_node_list->CopyFrom(*node_list);
node_list = new_node_list;
}
set_has_node_list();
kind_.node_list_ = node_list;
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.CollectionDef.node_list)
}
::tensorflow::CollectionDef_NodeList* CollectionDef::unsafe_arena_release_node_list() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CollectionDef.node_list)
if (has_node_list()) {
clear_has_kind();
::tensorflow::CollectionDef_NodeList* temp = kind_.node_list_;
kind_.node_list_ = NULL;
return temp;
} else {
return NULL;
}
}
void CollectionDef::unsafe_arena_set_allocated_node_list(::tensorflow::CollectionDef_NodeList* node_list) {
clear_kind();
if (node_list) {
set_has_node_list();
kind_.node_list_ = node_list;
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CollectionDef.node_list)
}
// .tensorflow.CollectionDef.BytesList bytes_list = 2;
bool CollectionDef::has_bytes_list() const {
return kind_case() == kBytesList;
}
void CollectionDef::set_has_bytes_list() {
_oneof_case_[0] = kBytesList;
}
void CollectionDef::clear_bytes_list() {
if (has_bytes_list()) {
if (GetArenaNoVirtual() == NULL) {
delete kind_.bytes_list_;
}
clear_has_kind();
}
}
const ::tensorflow::CollectionDef_BytesList& CollectionDef::bytes_list() const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.bytes_list)
return has_bytes_list()
? *kind_.bytes_list_
: ::tensorflow::CollectionDef_BytesList::default_instance();
}
::tensorflow::CollectionDef_BytesList* CollectionDef::mutable_bytes_list() {
if (!has_bytes_list()) {
clear_kind();
set_has_bytes_list();
kind_.bytes_list_ =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_BytesList >(
GetArenaNoVirtual());
}
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.bytes_list)
return kind_.bytes_list_;
}
::tensorflow::CollectionDef_BytesList* CollectionDef::release_bytes_list() {
// @@protoc_insertion_point(field_release:tensorflow.CollectionDef.bytes_list)
if (has_bytes_list()) {
clear_has_kind();
if (GetArenaNoVirtual() != NULL) {
::tensorflow::CollectionDef_BytesList* temp = new ::tensorflow::CollectionDef_BytesList(*kind_.bytes_list_);
kind_.bytes_list_ = NULL;
return temp;
} else {
::tensorflow::CollectionDef_BytesList* temp = kind_.bytes_list_;
kind_.bytes_list_ = NULL;
return temp;
}
} else {
return NULL;
}
}
void CollectionDef::set_allocated_bytes_list(::tensorflow::CollectionDef_BytesList* bytes_list) {
clear_kind();
if (bytes_list) {
if (GetArenaNoVirtual() != NULL &&
::google::protobuf::Arena::GetArena(bytes_list) == NULL) {
GetArenaNoVirtual()->Own(bytes_list);
} else if (GetArenaNoVirtual() !=
::google::protobuf::Arena::GetArena(bytes_list)) {
::tensorflow::CollectionDef_BytesList* new_bytes_list =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_BytesList >(
GetArenaNoVirtual());
new_bytes_list->CopyFrom(*bytes_list);
bytes_list = new_bytes_list;
}
set_has_bytes_list();
kind_.bytes_list_ = bytes_list;
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.CollectionDef.bytes_list)
}
::tensorflow::CollectionDef_BytesList* CollectionDef::unsafe_arena_release_bytes_list() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CollectionDef.bytes_list)
if (has_bytes_list()) {
clear_has_kind();
::tensorflow::CollectionDef_BytesList* temp = kind_.bytes_list_;
kind_.bytes_list_ = NULL;
return temp;
} else {
return NULL;
}
}
void CollectionDef::unsafe_arena_set_allocated_bytes_list(::tensorflow::CollectionDef_BytesList* bytes_list) {
clear_kind();
if (bytes_list) {
set_has_bytes_list();
kind_.bytes_list_ = bytes_list;
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CollectionDef.bytes_list)
}
// .tensorflow.CollectionDef.Int64List int64_list = 3;
bool CollectionDef::has_int64_list() const {
return kind_case() == kInt64List;
}
void CollectionDef::set_has_int64_list() {
_oneof_case_[0] = kInt64List;
}
void CollectionDef::clear_int64_list() {
if (has_int64_list()) {
if (GetArenaNoVirtual() == NULL) {
delete kind_.int64_list_;
}
clear_has_kind();
}
}
const ::tensorflow::CollectionDef_Int64List& CollectionDef::int64_list() const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.int64_list)
return has_int64_list()
? *kind_.int64_list_
: ::tensorflow::CollectionDef_Int64List::default_instance();
}
::tensorflow::CollectionDef_Int64List* CollectionDef::mutable_int64_list() {
if (!has_int64_list()) {
clear_kind();
set_has_int64_list();
kind_.int64_list_ =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_Int64List >(
GetArenaNoVirtual());
}
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.int64_list)
return kind_.int64_list_;
}
::tensorflow::CollectionDef_Int64List* CollectionDef::release_int64_list() {
// @@protoc_insertion_point(field_release:tensorflow.CollectionDef.int64_list)
if (has_int64_list()) {
clear_has_kind();
if (GetArenaNoVirtual() != NULL) {
::tensorflow::CollectionDef_Int64List* temp = new ::tensorflow::CollectionDef_Int64List(*kind_.int64_list_);
kind_.int64_list_ = NULL;
return temp;
} else {
::tensorflow::CollectionDef_Int64List* temp = kind_.int64_list_;
kind_.int64_list_ = NULL;
return temp;
}
} else {
return NULL;
}
}
void CollectionDef::set_allocated_int64_list(::tensorflow::CollectionDef_Int64List* int64_list) {
clear_kind();
if (int64_list) {
if (GetArenaNoVirtual() != NULL &&
::google::protobuf::Arena::GetArena(int64_list) == NULL) {
GetArenaNoVirtual()->Own(int64_list);
} else if (GetArenaNoVirtual() !=
::google::protobuf::Arena::GetArena(int64_list)) {
::tensorflow::CollectionDef_Int64List* new_int64_list =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_Int64List >(
GetArenaNoVirtual());
new_int64_list->CopyFrom(*int64_list);
int64_list = new_int64_list;
}
set_has_int64_list();
kind_.int64_list_ = int64_list;
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.CollectionDef.int64_list)
}
::tensorflow::CollectionDef_Int64List* CollectionDef::unsafe_arena_release_int64_list() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CollectionDef.int64_list)
if (has_int64_list()) {
clear_has_kind();
::tensorflow::CollectionDef_Int64List* temp = kind_.int64_list_;
kind_.int64_list_ = NULL;
return temp;
} else {
return NULL;
}
}
void CollectionDef::unsafe_arena_set_allocated_int64_list(::tensorflow::CollectionDef_Int64List* int64_list) {
clear_kind();
if (int64_list) {
set_has_int64_list();
kind_.int64_list_ = int64_list;
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CollectionDef.int64_list)
}
// .tensorflow.CollectionDef.FloatList float_list = 4;
bool CollectionDef::has_float_list() const {
return kind_case() == kFloatList;
}
void CollectionDef::set_has_float_list() {
_oneof_case_[0] = kFloatList;
}
void CollectionDef::clear_float_list() {
if (has_float_list()) {
if (GetArenaNoVirtual() == NULL) {
delete kind_.float_list_;
}
clear_has_kind();
}
}
const ::tensorflow::CollectionDef_FloatList& CollectionDef::float_list() const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.float_list)
return has_float_list()
? *kind_.float_list_
: ::tensorflow::CollectionDef_FloatList::default_instance();
}
::tensorflow::CollectionDef_FloatList* CollectionDef::mutable_float_list() {
if (!has_float_list()) {
clear_kind();
set_has_float_list();
kind_.float_list_ =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_FloatList >(
GetArenaNoVirtual());
}
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.float_list)
return kind_.float_list_;
}
::tensorflow::CollectionDef_FloatList* CollectionDef::release_float_list() {
// @@protoc_insertion_point(field_release:tensorflow.CollectionDef.float_list)
if (has_float_list()) {
clear_has_kind();
if (GetArenaNoVirtual() != NULL) {
::tensorflow::CollectionDef_FloatList* temp = new ::tensorflow::CollectionDef_FloatList(*kind_.float_list_);
kind_.float_list_ = NULL;
return temp;
} else {
::tensorflow::CollectionDef_FloatList* temp = kind_.float_list_;
kind_.float_list_ = NULL;
return temp;
}
} else {
return NULL;
}
}
void CollectionDef::set_allocated_float_list(::tensorflow::CollectionDef_FloatList* float_list) {
clear_kind();
if (float_list) {
if (GetArenaNoVirtual() != NULL &&
::google::protobuf::Arena::GetArena(float_list) == NULL) {
GetArenaNoVirtual()->Own(float_list);
} else if (GetArenaNoVirtual() !=
::google::protobuf::Arena::GetArena(float_list)) {
::tensorflow::CollectionDef_FloatList* new_float_list =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_FloatList >(
GetArenaNoVirtual());
new_float_list->CopyFrom(*float_list);
float_list = new_float_list;
}
set_has_float_list();
kind_.float_list_ = float_list;
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.CollectionDef.float_list)
}
::tensorflow::CollectionDef_FloatList* CollectionDef::unsafe_arena_release_float_list() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CollectionDef.float_list)
if (has_float_list()) {
clear_has_kind();
::tensorflow::CollectionDef_FloatList* temp = kind_.float_list_;
kind_.float_list_ = NULL;
return temp;
} else {
return NULL;
}
}
void CollectionDef::unsafe_arena_set_allocated_float_list(::tensorflow::CollectionDef_FloatList* float_list) {
clear_kind();
if (float_list) {
set_has_float_list();
kind_.float_list_ = float_list;
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CollectionDef.float_list)
}
// .tensorflow.CollectionDef.AnyList any_list = 5;
bool CollectionDef::has_any_list() const {
return kind_case() == kAnyList;
}
void CollectionDef::set_has_any_list() {
_oneof_case_[0] = kAnyList;
}
void CollectionDef::clear_any_list() {
if (has_any_list()) {
if (GetArenaNoVirtual() == NULL) {
delete kind_.any_list_;
}
clear_has_kind();
}
}
const ::tensorflow::CollectionDef_AnyList& CollectionDef::any_list() const {
// @@protoc_insertion_point(field_get:tensorflow.CollectionDef.any_list)
return has_any_list()
? *kind_.any_list_
: ::tensorflow::CollectionDef_AnyList::default_instance();
}
::tensorflow::CollectionDef_AnyList* CollectionDef::mutable_any_list() {
if (!has_any_list()) {
clear_kind();
set_has_any_list();
kind_.any_list_ =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_AnyList >(
GetArenaNoVirtual());
}
// @@protoc_insertion_point(field_mutable:tensorflow.CollectionDef.any_list)
return kind_.any_list_;
}
::tensorflow::CollectionDef_AnyList* CollectionDef::release_any_list() {
// @@protoc_insertion_point(field_release:tensorflow.CollectionDef.any_list)
if (has_any_list()) {
clear_has_kind();
if (GetArenaNoVirtual() != NULL) {
::tensorflow::CollectionDef_AnyList* temp = new ::tensorflow::CollectionDef_AnyList(*kind_.any_list_);
kind_.any_list_ = NULL;
return temp;
} else {
::tensorflow::CollectionDef_AnyList* temp = kind_.any_list_;
kind_.any_list_ = NULL;
return temp;
}
} else {
return NULL;
}
}
void CollectionDef::set_allocated_any_list(::tensorflow::CollectionDef_AnyList* any_list) {
clear_kind();
if (any_list) {
if (GetArenaNoVirtual() != NULL &&
::google::protobuf::Arena::GetArena(any_list) == NULL) {
GetArenaNoVirtual()->Own(any_list);
} else if (GetArenaNoVirtual() !=
::google::protobuf::Arena::GetArena(any_list)) {
::tensorflow::CollectionDef_AnyList* new_any_list =
::google::protobuf::Arena::CreateMessage< ::tensorflow::CollectionDef_AnyList >(
GetArenaNoVirtual());
new_any_list->CopyFrom(*any_list);
any_list = new_any_list;
}
set_has_any_list();
kind_.any_list_ = any_list;
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.CollectionDef.any_list)
}
::tensorflow::CollectionDef_AnyList* CollectionDef::unsafe_arena_release_any_list() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CollectionDef.any_list)
if (has_any_list()) {
clear_has_kind();
::tensorflow::CollectionDef_AnyList* temp = kind_.any_list_;
kind_.any_list_ = NULL;
return temp;
} else {
return NULL;
}
}
void CollectionDef::unsafe_arena_set_allocated_any_list(::tensorflow::CollectionDef_AnyList* any_list) {
clear_kind();
if (any_list) {
set_has_any_list();
kind_.any_list_ = any_list;
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CollectionDef.any_list)
}
bool CollectionDef::has_kind() const {
return kind_case() != KIND_NOT_SET;
}
void CollectionDef::clear_has_kind() {
_oneof_case_[0] = KIND_NOT_SET;
}
CollectionDef::KindCase CollectionDef::kind_case() const {
return CollectionDef::KindCase(_oneof_case_[0]);
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
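// The _slow_* helpers below cover the arena-allocated paths for TensorInfo's
// lazily created tensor_shape submessage; the inline accessors fall back to
// them when the message lives on an arena.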
void TensorInfo::_slow_mutable_tensor_shape() {
tensor_shape_ = ::google::protobuf::Arena::CreateMessage< ::tensorflow::TensorShapeProto >(
GetArenaNoVirtual());
}
::tensorflow::TensorShapeProto* TensorInfo::_slow_release_tensor_shape() {
if (tensor_shape_ == NULL) {
return NULL;
} else {
::tensorflow::TensorShapeProto* temp = new ::tensorflow::TensorShapeProto(*tensor_shape_);
tensor_shape_ = NULL;
return temp;
}
}
::tensorflow::TensorShapeProto* TensorInfo::unsafe_arena_release_tensor_shape() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.TensorInfo.tensor_shape)
::tensorflow::TensorShapeProto* temp = tensor_shape_;
tensor_shape_ = NULL;
return temp;
}
void TensorInfo::_slow_set_allocated_tensor_shape(
::google::protobuf::Arena* message_arena, ::tensorflow::TensorShapeProto** tensor_shape) {
if (message_arena != NULL &&
::google::protobuf::Arena::GetArena(*tensor_shape) == NULL) {
message_arena->Own(*tensor_shape);
} else if (message_arena !=
::google::protobuf::Arena::GetArena(*tensor_shape)) {
::tensorflow::TensorShapeProto* new_tensor_shape =
::google::protobuf::Arena::CreateMessage< ::tensorflow::TensorShapeProto >(
message_arena);
new_tensor_shape->CopyFrom(**tensor_shape);
*tensor_shape = new_tensor_shape;
}
}
void TensorInfo::unsafe_arena_set_allocated_tensor_shape(
::tensorflow::TensorShapeProto* tensor_shape) {
if (GetArenaNoVirtual() == NULL) {
delete tensor_shape_;
}
tensor_shape_ = tensor_shape;
if (tensor_shape) {
} else {
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorInfo.tensor_shape)
}
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int TensorInfo::kNameFieldNumber;
const int TensorInfo::kDtypeFieldNumber;
const int TensorInfo::kTensorShapeFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
TensorInfo::TensorInfo()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.TensorInfo)
}
TensorInfo::TensorInfo(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.TensorInfo)
}
TensorInfo::TensorInfo(const TensorInfo& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.name().size() > 0) {
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name(),
GetArenaNoVirtual());
}
if (from.has_tensor_shape()) {
tensor_shape_ = new ::tensorflow::TensorShapeProto(*from.tensor_shape_);
} else {
tensor_shape_ = NULL;
}
dtype_ = from.dtype_;
// @@protoc_insertion_point(copy_constructor:tensorflow.TensorInfo)
}
void TensorInfo::SharedCtor() {
name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
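// tensor_shape_ and dtype_ are laid out contiguously, so one memset
// zero-initializes both fields.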
::memset(&tensor_shape_, 0, reinterpret_cast<char*>(&dtype_) -
reinterpret_cast<char*>(&tensor_shape_) + sizeof(dtype_));
_cached_size_ = 0;
}
TensorInfo::~TensorInfo() {
// @@protoc_insertion_point(destructor:tensorflow.TensorInfo)
SharedDtor();
}
void TensorInfo::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
name_.Destroy(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), arena);
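// Only real instances own tensor_shape_; the shared default instance must
// never delete it.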
if (this != internal_default_instance()) {
delete tensor_shape_;
}
}
void TensorInfo::ArenaDtor(void* object) {
TensorInfo* _this = reinterpret_cast< TensorInfo* >(object);
(void)_this;
}
void TensorInfo::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void TensorInfo::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* TensorInfo::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[10].descriptor;
}
const TensorInfo& TensorInfo::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
TensorInfo* TensorInfo::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<TensorInfo>(arena);
}
void TensorInfo::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.TensorInfo)
name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
if (GetArenaNoVirtual() == NULL && tensor_shape_ != NULL) {
delete tensor_shape_;
}
tensor_shape_ = NULL;
dtype_ = 0;
}
bool TensorInfo::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.TensorInfo)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// string name = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_name()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->name().data(), this->name().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.TensorInfo.name"));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.DataType dtype = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(16u)) {
int value;
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
input, &value)));
set_dtype(static_cast< ::tensorflow::DataType >(value));
} else {
goto handle_unusual;
}
break;
}
// .tensorflow.TensorShapeProto tensor_shape = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_tensor_shape()));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.TensorInfo)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.TensorInfo)
return false;
#undef DO_
}
void TensorInfo::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.TensorInfo)
// string name = 1;
if (this->name().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->name().data(), this->name().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.TensorInfo.name");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
1, this->name(), output);
}
// .tensorflow.DataType dtype = 2;
if (this->dtype() != 0) {
::google::protobuf::internal::WireFormatLite::WriteEnum(
2, this->dtype(), output);
}
// .tensorflow.TensorShapeProto tensor_shape = 3;
if (this->has_tensor_shape()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
3, *this->tensor_shape_, output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.TensorInfo)
}
::google::protobuf::uint8* TensorInfo::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.TensorInfo)
// string name = 1;
if (this->name().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->name().data(), this->name().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.TensorInfo.name");
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
1, this->name(), target);
}
// .tensorflow.DataType dtype = 2;
if (this->dtype() != 0) {
target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
2, this->dtype(), target);
}
// .tensorflow.TensorShapeProto tensor_shape = 3;
if (this->has_tensor_shape()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
3, *this->tensor_shape_, false, target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.TensorInfo)
return target;
}
size_t TensorInfo::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.TensorInfo)
size_t total_size = 0;
// string name = 1;
if (this->name().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->name());
}
// .tensorflow.TensorShapeProto tensor_shape = 3;
if (this->has_tensor_shape()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->tensor_shape_);
}
// .tensorflow.DataType dtype = 2;
if (this->dtype() != 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::EnumSize(this->dtype());
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void TensorInfo::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.TensorInfo)
GOOGLE_DCHECK_NE(&from, this);
const TensorInfo* source =
::google::protobuf::internal::DynamicCastToGenerated<const TensorInfo>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.TensorInfo)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.TensorInfo)
MergeFrom(*source);
}
}
void TensorInfo::MergeFrom(const TensorInfo& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.TensorInfo)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
if (from.name().size() > 0) {
set_name(from.name());
}
if (from.has_tensor_shape()) {
mutable_tensor_shape()->::tensorflow::TensorShapeProto::MergeFrom(from.tensor_shape());
}
if (from.dtype() != 0) {
set_dtype(from.dtype());
}
}
void TensorInfo::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.TensorInfo)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void TensorInfo::CopyFrom(const TensorInfo& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.TensorInfo)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool TensorInfo::IsInitialized() const {
return true;
}
void TensorInfo::Swap(TensorInfo* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
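// Pointers cannot move between different arenas; swap through a temporary
// copy created on this message's arena.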
TensorInfo* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void TensorInfo::UnsafeArenaSwap(TensorInfo* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void TensorInfo::InternalSwap(TensorInfo* other) {
name_.Swap(&other->name_);
std::swap(tensor_shape_, other->tensor_shape_);
std::swap(dtype_, other->dtype_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata TensorInfo::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[10];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// TensorInfo
// string name = 1;
void TensorInfo::clear_name() {
name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
const ::std::string& TensorInfo::name() const {
// @@protoc_insertion_point(field_get:tensorflow.TensorInfo.name)
return name_.Get();
}
void TensorInfo::set_name(const ::std::string& value) {
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
// @@protoc_insertion_point(field_set:tensorflow.TensorInfo.name)
}
void TensorInfo::set_name(const char* value) {
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_char:tensorflow.TensorInfo.name)
}
void TensorInfo::set_name(const char* value,
size_t size) {
name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_pointer:tensorflow.TensorInfo.name)
}
::std::string* TensorInfo::mutable_name() {
// @@protoc_insertion_point(field_mutable:tensorflow.TensorInfo.name)
return name_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* TensorInfo::release_name() {
// @@protoc_insertion_point(field_release:tensorflow.TensorInfo.name)
return name_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* TensorInfo::unsafe_arena_release_name() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.TensorInfo.name)
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
return name_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
GetArenaNoVirtual());
}
void TensorInfo::set_allocated_name(::std::string* name) {
if (name != NULL) {
} else {
}
name_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name,
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_allocated:tensorflow.TensorInfo.name)
}
void TensorInfo::unsafe_arena_set_allocated_name(
::std::string* name) {
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
if (name != NULL) {
} else {
}
name_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
name, GetArenaNoVirtual());
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorInfo.name)
}
// .tensorflow.DataType dtype = 2;
void TensorInfo::clear_dtype() {
dtype_ = 0;
}
::tensorflow::DataType TensorInfo::dtype() const {
// @@protoc_insertion_point(field_get:tensorflow.TensorInfo.dtype)
return static_cast< ::tensorflow::DataType >(dtype_);
}
void TensorInfo::set_dtype(::tensorflow::DataType value) {
dtype_ = value;
// @@protoc_insertion_point(field_set:tensorflow.TensorInfo.dtype)
}
// .tensorflow.TensorShapeProto tensor_shape = 3;
bool TensorInfo::has_tensor_shape() const {
return this != internal_default_instance() && tensor_shape_ != NULL;
}
void TensorInfo::clear_tensor_shape() {
if (GetArenaNoVirtual() == NULL && tensor_shape_ != NULL) delete tensor_shape_;
tensor_shape_ = NULL;
}
const ::tensorflow::TensorShapeProto& TensorInfo::tensor_shape() const {
// @@protoc_insertion_point(field_get:tensorflow.TensorInfo.tensor_shape)
return tensor_shape_ != NULL ? *tensor_shape_
: *::tensorflow::TensorShapeProto::internal_default_instance();
}
::tensorflow::TensorShapeProto* TensorInfo::mutable_tensor_shape() {
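// Created lazily on first mutable access via the arena-aware slow path.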
if (tensor_shape_ == NULL) {
_slow_mutable_tensor_shape();
}
// @@protoc_insertion_point(field_mutable:tensorflow.TensorInfo.tensor_shape)
return tensor_shape_;
}
::tensorflow::TensorShapeProto* TensorInfo::release_tensor_shape() {
// @@protoc_insertion_point(field_release:tensorflow.TensorInfo.tensor_shape)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_tensor_shape();
} else {
::tensorflow::TensorShapeProto* temp = tensor_shape_;
tensor_shape_ = NULL;
return temp;
}
}
void TensorInfo::set_allocated_tensor_shape(::tensorflow::TensorShapeProto* tensor_shape) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete tensor_shape_;
}
if (tensor_shape != NULL) {
_slow_set_allocated_tensor_shape(message_arena, &tensor_shape);
}
tensor_shape_ = tensor_shape;
if (tensor_shape) {
} else {
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.TensorInfo.tensor_shape)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if PROTOBUF_INLINE_NOT_IN_HEADERS
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if PROTOBUF_INLINE_NOT_IN_HEADERS
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int SignatureDef::kInputsFieldNumber;
const int SignatureDef::kOutputsFieldNumber;
const int SignatureDef::kMethodNameFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
SignatureDef::SignatureDef()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.SignatureDef)
}
SignatureDef::SignatureDef(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena),
inputs_(arena),
outputs_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.SignatureDef)
}
SignatureDef::SignatureDef(const SignatureDef& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
const ::google::protobuf::Descriptor*& SignatureDef_InputsEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[11].descriptor;
const ::google::protobuf::Descriptor*& SignatureDef_OutputsEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[12].descriptor;
inputs_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
inputs_.SetEntryDescriptor(
&SignatureDef_InputsEntry_descriptor);
inputs_.MergeFrom(from.inputs_);
outputs_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
outputs_.SetEntryDescriptor(
&SignatureDef_OutputsEntry_descriptor);
outputs_.MergeFrom(from.outputs_);
method_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.method_name().size() > 0) {
method_name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.method_name(),
GetArenaNoVirtual());
}
// @@protoc_insertion_point(copy_constructor:tensorflow.SignatureDef)
}
void SignatureDef::SharedCtor() {
const ::google::protobuf::Descriptor*& SignatureDef_InputsEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[11].descriptor;
const ::google::protobuf::Descriptor*& SignatureDef_OutputsEntry_descriptor = protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[12].descriptor;
inputs_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
inputs_.SetEntryDescriptor(
&SignatureDef_InputsEntry_descriptor);
outputs_.SetAssignDescriptorCallback(
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce);
outputs_.SetEntryDescriptor(
&SignatureDef_OutputsEntry_descriptor);
method_name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
_cached_size_ = 0;
}
SignatureDef::~SignatureDef() {
// @@protoc_insertion_point(destructor:tensorflow.SignatureDef)
SharedDtor();
}
void SignatureDef::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
method_name_.Destroy(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), arena);
}
void SignatureDef::ArenaDtor(void* object) {
SignatureDef* _this = reinterpret_cast< SignatureDef* >(object);
(void)_this;
}
void SignatureDef::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void SignatureDef::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* SignatureDef::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[13].descriptor;
}
const SignatureDef& SignatureDef::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
SignatureDef* SignatureDef::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<SignatureDef>(arena);
}
void SignatureDef::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.SignatureDef)
inputs_.Clear();
outputs_.Clear();
method_name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
bool SignatureDef::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.SignatureDef)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// map<string, .tensorflow.TensorInfo> inputs = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
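// Each map entry arrives as a length-delimited nested message, so recursion
// depth is tracked around its parse.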
DO_(input->IncrementRecursionDepth());
SignatureDef_InputsEntry::Parser< ::google::protobuf::internal::MapField<
::std::string, ::tensorflow::TensorInfo,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0 >,
::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo > > parser(&inputs_);
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, &parser));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
parser.key().data(), parser.key().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.SignatureDef.InputsEntry.key"));
} else {
goto handle_unusual;
}
input->UnsafeDecrementRecursionDepth();
break;
}
// map<string, .tensorflow.TensorInfo> outputs = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(input->IncrementRecursionDepth());
SignatureDef_OutputsEntry::Parser< ::google::protobuf::internal::MapField<
::std::string, ::tensorflow::TensorInfo,
::google::protobuf::internal::WireFormatLite::TYPE_STRING,
::google::protobuf::internal::WireFormatLite::TYPE_MESSAGE,
0 >,
::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo > > parser(&outputs_);
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, &parser));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
parser.key().data(), parser.key().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.SignatureDef.OutputsEntry.key"));
} else {
goto handle_unusual;
}
input->UnsafeDecrementRecursionDepth();
break;
}
// string method_name = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_method_name()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->method_name().data(), this->method_name().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.SignatureDef.method_name"));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.SignatureDef)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.SignatureDef)
return false;
#undef DO_
}
void SignatureDef::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.SignatureDef)
// map<string, .tensorflow.TensorInfo> inputs = 1;
if (!this->inputs().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.SignatureDef.InputsEntry.key");
}
};
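// Deterministic serialization writes map entries sorted by key; otherwise
// the map's iteration order is used.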
if (output->IsSerializationDeterministic() &&
this->inputs().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->inputs().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->inputs().begin();
it != this->inputs().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<SignatureDef_InputsEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(inputs_.NewEntryWrapper(
items[i]->first, items[i]->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<SignatureDef_InputsEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->inputs().begin();
it != this->inputs().end(); ++it) {
entry.reset(inputs_.NewEntryWrapper(
it->first, it->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// map<string, .tensorflow.TensorInfo> outputs = 2;
if (!this->outputs().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.SignatureDef.OutputsEntry.key");
}
};
if (output->IsSerializationDeterministic() &&
this->outputs().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->outputs().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->outputs().begin();
it != this->outputs().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<SignatureDef_OutputsEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(outputs_.NewEntryWrapper(
items[i]->first, items[i]->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<SignatureDef_OutputsEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->outputs().begin();
it != this->outputs().end(); ++it) {
entry.reset(outputs_.NewEntryWrapper(
it->first, it->second));
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, *entry, output);
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// string method_name = 3;
if (this->method_name().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->method_name().data(), this->method_name().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.SignatureDef.method_name");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
3, this->method_name(), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.SignatureDef)
}
::google::protobuf::uint8* SignatureDef::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.SignatureDef)
// map<string, .tensorflow.TensorInfo> inputs = 1;
if (!this->inputs().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.SignatureDef.InputsEntry.key");
}
};
if (deterministic &&
this->inputs().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->inputs().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->inputs().begin();
it != this->inputs().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<SignatureDef_InputsEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(inputs_.NewEntryWrapper(
items[i]->first, items[i]->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
1, *entry, deterministic, target);
;
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<SignatureDef_InputsEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->inputs().begin();
it != this->inputs().end(); ++it) {
entry.reset(inputs_.NewEntryWrapper(
it->first, it->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
1, *entry, deterministic, target);
;
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// map<string, .tensorflow.TensorInfo> outputs = 2;
if (!this->outputs().empty()) {
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_pointer
ConstPtr;
typedef ConstPtr SortItem;
typedef ::google::protobuf::internal::CompareByDerefFirst<SortItem> Less;
struct Utf8Check {
static void Check(ConstPtr p) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
p->first.data(), p->first.length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.SignatureDef.OutputsEntry.key");
}
};
if (deterministic &&
this->outputs().size() > 1) {
::google::protobuf::scoped_array<SortItem> items(
new SortItem[this->outputs().size()]);
typedef ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::size_type size_type;
size_type n = 0;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->outputs().begin();
it != this->outputs().end(); ++it, ++n) {
items[n] = SortItem(&*it);
}
::std::sort(&items[0], &items[n], Less());
::google::protobuf::scoped_ptr<SignatureDef_OutputsEntry> entry;
for (size_type i = 0; i < n; i++) {
entry.reset(outputs_.NewEntryWrapper(
items[i]->first, items[i]->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
2, *entry, deterministic, target);
;
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(items[i]);
}
} else {
::google::protobuf::scoped_ptr<SignatureDef_OutputsEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->outputs().begin();
it != this->outputs().end(); ++it) {
entry.reset(outputs_.NewEntryWrapper(
it->first, it->second));
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
2, *entry, deterministic, target);
;
if (entry->GetArena() != NULL) {
entry.release();
}
Utf8Check::Check(&*it);
}
}
}
// string method_name = 3;
if (this->method_name().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->method_name().data(), this->method_name().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.SignatureDef.method_name");
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
3, this->method_name(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.SignatureDef)
return target;
}
size_t SignatureDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.SignatureDef)
size_t total_size = 0;
// map<string, .tensorflow.TensorInfo> inputs = 1;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->inputs_size());
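// One tag byte per map entry, plus each entry's own serialized size measured
// through a temporary entry wrapper below.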
{
::google::protobuf::scoped_ptr<SignatureDef_InputsEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->inputs().begin();
it != this->inputs().end(); ++it) {
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
entry.reset(inputs_.NewEntryWrapper(it->first, it->second));
total_size += ::google::protobuf::internal::WireFormatLite::
MessageSizeNoVirtual(*entry);
}
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
}
// map<string, .tensorflow.TensorInfo> outputs = 2;
total_size += 1 *
::google::protobuf::internal::FromIntSize(this->outputs_size());
{
::google::protobuf::scoped_ptr<SignatureDef_OutputsEntry> entry;
for (::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >::const_iterator
it = this->outputs().begin();
it != this->outputs().end(); ++it) {
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
entry.reset(outputs_.NewEntryWrapper(it->first, it->second));
total_size += ::google::protobuf::internal::WireFormatLite::
MessageSizeNoVirtual(*entry);
}
if (entry.get() != NULL && entry->GetArena() != NULL) {
entry.release();
}
}
// string method_name = 3;
if (this->method_name().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->method_name());
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void SignatureDef::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.SignatureDef)
GOOGLE_DCHECK_NE(&from, this);
const SignatureDef* source =
::google::protobuf::internal::DynamicCastToGenerated<const SignatureDef>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.SignatureDef)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.SignatureDef)
MergeFrom(*source);
}
}
void SignatureDef::MergeFrom(const SignatureDef& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.SignatureDef)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
inputs_.MergeFrom(from.inputs_);
outputs_.MergeFrom(from.outputs_);
if (from.method_name().size() > 0) {
set_method_name(from.method_name());
}
}
void SignatureDef::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.SignatureDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void SignatureDef::CopyFrom(const SignatureDef& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.SignatureDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool SignatureDef::IsInitialized() const {
return true;
}
void SignatureDef::Swap(SignatureDef* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
SignatureDef* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void SignatureDef::UnsafeArenaSwap(SignatureDef* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void SignatureDef::InternalSwap(SignatureDef* other) {
inputs_.Swap(&other->inputs_);
outputs_.Swap(&other->outputs_);
method_name_.Swap(&other->method_name_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata SignatureDef::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[13];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// SignatureDef
// map<string, .tensorflow.TensorInfo> inputs = 1;
int SignatureDef::inputs_size() const {
return inputs_.size();
}
void SignatureDef::clear_inputs() {
inputs_.Clear();
}
const ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >&
SignatureDef::inputs() const {
// @@protoc_insertion_point(field_map:tensorflow.SignatureDef.inputs)
return inputs_.GetMap();
}
::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >*
SignatureDef::mutable_inputs() {
// @@protoc_insertion_point(field_mutable_map:tensorflow.SignatureDef.inputs)
return inputs_.MutableMap();
}
// map<string, .tensorflow.TensorInfo> outputs = 2;
int SignatureDef::outputs_size() const {
return outputs_.size();
}
void SignatureDef::clear_outputs() {
outputs_.Clear();
}
const ::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >&
SignatureDef::outputs() const {
// @@protoc_insertion_point(field_map:tensorflow.SignatureDef.outputs)
return outputs_.GetMap();
}
::google::protobuf::Map< ::std::string, ::tensorflow::TensorInfo >*
SignatureDef::mutable_outputs() {
// @@protoc_insertion_point(field_mutable_map:tensorflow.SignatureDef.outputs)
return outputs_.MutableMap();
}
// string method_name = 3;
void SignatureDef::clear_method_name() {
method_name_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
const ::std::string& SignatureDef::method_name() const {
// @@protoc_insertion_point(field_get:tensorflow.SignatureDef.method_name)
return method_name_.Get();
}
void SignatureDef::set_method_name(const ::std::string& value) {
method_name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
// @@protoc_insertion_point(field_set:tensorflow.SignatureDef.method_name)
}
void SignatureDef::set_method_name(const char* value) {
method_name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_char:tensorflow.SignatureDef.method_name)
}
void SignatureDef::set_method_name(const char* value,
size_t size) {
method_name_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_pointer:tensorflow.SignatureDef.method_name)
}
::std::string* SignatureDef::mutable_method_name() {
// @@protoc_insertion_point(field_mutable:tensorflow.SignatureDef.method_name)
return method_name_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* SignatureDef::release_method_name() {
// @@protoc_insertion_point(field_release:tensorflow.SignatureDef.method_name)
return method_name_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* SignatureDef::unsafe_arena_release_method_name() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.SignatureDef.method_name)
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
return method_name_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
GetArenaNoVirtual());
}
void SignatureDef::set_allocated_method_name(::std::string* method_name) {
if (method_name != NULL) {
} else {
}
method_name_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), method_name,
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_allocated:tensorflow.SignatureDef.method_name)
}
void SignatureDef::unsafe_arena_set_allocated_method_name(
::std::string* method_name) {
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
if (method_name != NULL) {
} else {
}
method_name_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
method_name, GetArenaNoVirtual());
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.SignatureDef.method_name)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
void AssetFileDef::_slow_mutable_tensor_info() {
tensor_info_ = ::google::protobuf::Arena::CreateMessage< ::tensorflow::TensorInfo >(
GetArenaNoVirtual());
}
::tensorflow::TensorInfo* AssetFileDef::_slow_release_tensor_info() {
if (tensor_info_ == NULL) {
return NULL;
} else {
::tensorflow::TensorInfo* temp = new ::tensorflow::TensorInfo(*tensor_info_);
tensor_info_ = NULL;
return temp;
}
}
::tensorflow::TensorInfo* AssetFileDef::unsafe_arena_release_tensor_info() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.AssetFileDef.tensor_info)
::tensorflow::TensorInfo* temp = tensor_info_;
tensor_info_ = NULL;
return temp;
}
void AssetFileDef::_slow_set_allocated_tensor_info(
::google::protobuf::Arena* message_arena, ::tensorflow::TensorInfo** tensor_info) {
if (message_arena != NULL &&
::google::protobuf::Arena::GetArena(*tensor_info) == NULL) {
message_arena->Own(*tensor_info);
} else if (message_arena !=
::google::protobuf::Arena::GetArena(*tensor_info)) {
::tensorflow::TensorInfo* new_tensor_info =
::google::protobuf::Arena::CreateMessage< ::tensorflow::TensorInfo >(
message_arena);
new_tensor_info->CopyFrom(**tensor_info);
*tensor_info = new_tensor_info;
}
}
void AssetFileDef::unsafe_arena_set_allocated_tensor_info(
::tensorflow::TensorInfo* tensor_info) {
if (GetArenaNoVirtual() == NULL) {
delete tensor_info_;
}
tensor_info_ = tensor_info;
if (tensor_info) {
} else {
}
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.AssetFileDef.tensor_info)
}
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int AssetFileDef::kTensorInfoFieldNumber;
const int AssetFileDef::kFilenameFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
AssetFileDef::AssetFileDef()
: ::google::protobuf::Message(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:tensorflow.AssetFileDef)
}
AssetFileDef::AssetFileDef(::google::protobuf::Arena* arena)
: ::google::protobuf::Message(),
_internal_metadata_(arena) {
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
SharedCtor();
RegisterArenaDtor(arena);
// @@protoc_insertion_point(arena_constructor:tensorflow.AssetFileDef)
}
AssetFileDef::AssetFileDef(const AssetFileDef& from)
: ::google::protobuf::Message(),
_internal_metadata_(NULL),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
filename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.filename().size() > 0) {
filename_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filename(),
GetArenaNoVirtual());
}
if (from.has_tensor_info()) {
tensor_info_ = new ::tensorflow::TensorInfo(*from.tensor_info_);
} else {
tensor_info_ = NULL;
}
// @@protoc_insertion_point(copy_constructor:tensorflow.AssetFileDef)
}
void AssetFileDef::SharedCtor() {
filename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
tensor_info_ = NULL;
_cached_size_ = 0;
}
AssetFileDef::~AssetFileDef() {
// @@protoc_insertion_point(destructor:tensorflow.AssetFileDef)
SharedDtor();
}
void AssetFileDef::SharedDtor() {
::google::protobuf::Arena* arena = GetArenaNoVirtual();
if (arena != NULL) {
return;
}
filename_.Destroy(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), arena);
if (this != internal_default_instance()) {
delete tensor_info_;
}
}
void AssetFileDef::ArenaDtor(void* object) {
AssetFileDef* _this = reinterpret_cast< AssetFileDef* >(object);
(void)_this;
}
void AssetFileDef::RegisterArenaDtor(::google::protobuf::Arena* arena) {
}
void AssetFileDef::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* AssetFileDef::descriptor() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[14].descriptor;
}
const AssetFileDef& AssetFileDef::default_instance() {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::InitDefaults();
return *internal_default_instance();
}
AssetFileDef* AssetFileDef::New(::google::protobuf::Arena* arena) const {
return ::google::protobuf::Arena::CreateMessage<AssetFileDef>(arena);
}
void AssetFileDef::Clear() {
// @@protoc_insertion_point(message_clear_start:tensorflow.AssetFileDef)
filename_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
if (GetArenaNoVirtual() == NULL && tensor_info_ != NULL) {
delete tensor_info_;
}
tensor_info_ = NULL;
}
bool AssetFileDef::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:tensorflow.AssetFileDef)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// .tensorflow.TensorInfo tensor_info = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_tensor_info()));
} else {
goto handle_unusual;
}
break;
}
// string filename = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_filename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->filename().data(), this->filename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"tensorflow.AssetFileDef.filename"));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:tensorflow.AssetFileDef)
return true;
failure:
// @@protoc_insertion_point(parse_failure:tensorflow.AssetFileDef)
return false;
#undef DO_
}
void AssetFileDef::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:tensorflow.AssetFileDef)
// .tensorflow.TensorInfo tensor_info = 1;
if (this->has_tensor_info()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, *this->tensor_info_, output);
}
// string filename = 2;
if (this->filename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->filename().data(), this->filename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.AssetFileDef.filename");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
2, this->filename(), output);
}
// @@protoc_insertion_point(serialize_end:tensorflow.AssetFileDef)
}
::google::protobuf::uint8* AssetFileDef::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
// @@protoc_insertion_point(serialize_to_array_start:tensorflow.AssetFileDef)
// .tensorflow.TensorInfo tensor_info = 1;
if (this->has_tensor_info()) {
target = ::google::protobuf::internal::WireFormatLite::
InternalWriteMessageNoVirtualToArray(
1, *this->tensor_info_, false, target);
}
// string filename = 2;
if (this->filename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->filename().data(), this->filename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"tensorflow.AssetFileDef.filename");
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
2, this->filename(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:tensorflow.AssetFileDef)
return target;
}
size_t AssetFileDef::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.AssetFileDef)
size_t total_size = 0;
// string filename = 2;
if (this->filename().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->filename());
}
// .tensorflow.TensorInfo tensor_info = 1;
if (this->has_tensor_info()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->tensor_info_);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void AssetFileDef::MergeFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_merge_from_start:tensorflow.AssetFileDef)
GOOGLE_DCHECK_NE(&from, this);
const AssetFileDef* source =
::google::protobuf::internal::DynamicCastToGenerated<const AssetFileDef>(
&from);
if (source == NULL) {
// @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.AssetFileDef)
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
// @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.AssetFileDef)
MergeFrom(*source);
}
}
void AssetFileDef::MergeFrom(const AssetFileDef& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.AssetFileDef)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
if (from.filename().size() > 0) {
set_filename(from.filename());
}
if (from.has_tensor_info()) {
mutable_tensor_info()->::tensorflow::TensorInfo::MergeFrom(from.tensor_info());
}
}
void AssetFileDef::CopyFrom(const ::google::protobuf::Message& from) {
// @@protoc_insertion_point(generalized_copy_from_start:tensorflow.AssetFileDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
void AssetFileDef::CopyFrom(const AssetFileDef& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.AssetFileDef)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool AssetFileDef::IsInitialized() const {
return true;
}
void AssetFileDef::Swap(AssetFileDef* other) {
if (other == this) return;
if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
InternalSwap(other);
} else {
AssetFileDef* temp = New(GetArenaNoVirtual());
temp->MergeFrom(*other);
other->CopyFrom(*this);
InternalSwap(temp);
if (GetArenaNoVirtual() == NULL) {
delete temp;
}
}
}
void AssetFileDef::UnsafeArenaSwap(AssetFileDef* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
InternalSwap(other);
}
void AssetFileDef::InternalSwap(AssetFileDef* other) {
filename_.Swap(&other->filename_);
std::swap(tensor_info_, other->tensor_info_);
std::swap(_cached_size_, other->_cached_size_);
}
::google::protobuf::Metadata AssetFileDef::GetMetadata() const {
protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::protobuf_AssignDescriptorsOnce();
return protobuf_tensorflow_2fcore_2fprotobuf_2fmeta_5fgraph_2eproto::file_level_metadata[14];
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// AssetFileDef
// .tensorflow.TensorInfo tensor_info = 1;
bool AssetFileDef::has_tensor_info() const {
return this != internal_default_instance() && tensor_info_ != NULL;
}
void AssetFileDef::clear_tensor_info() {
if (GetArenaNoVirtual() == NULL && tensor_info_ != NULL) delete tensor_info_;
tensor_info_ = NULL;
}
const ::tensorflow::TensorInfo& AssetFileDef::tensor_info() const {
// @@protoc_insertion_point(field_get:tensorflow.AssetFileDef.tensor_info)
return tensor_info_ != NULL ? *tensor_info_
: *::tensorflow::TensorInfo::internal_default_instance();
}
::tensorflow::TensorInfo* AssetFileDef::mutable_tensor_info() {
if (tensor_info_ == NULL) {
_slow_mutable_tensor_info();
}
// @@protoc_insertion_point(field_mutable:tensorflow.AssetFileDef.tensor_info)
return tensor_info_;
}
::tensorflow::TensorInfo* AssetFileDef::release_tensor_info() {
// @@protoc_insertion_point(field_release:tensorflow.AssetFileDef.tensor_info)
if (GetArenaNoVirtual() != NULL) {
return _slow_release_tensor_info();
} else {
::tensorflow::TensorInfo* temp = tensor_info_;
tensor_info_ = NULL;
return temp;
}
}
void AssetFileDef::set_allocated_tensor_info(::tensorflow::TensorInfo* tensor_info) {
::google::protobuf::Arena* message_arena = GetArenaNoVirtual();
if (message_arena == NULL) {
delete tensor_info_;
}
if (tensor_info != NULL) {
_slow_set_allocated_tensor_info(message_arena, &tensor_info);
}
tensor_info_ = tensor_info;
if (tensor_info) {
} else {
}
// @@protoc_insertion_point(field_set_allocated:tensorflow.AssetFileDef.tensor_info)
}
// string filename = 2;
void AssetFileDef::clear_filename() {
filename_.ClearToEmpty(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
const ::std::string& AssetFileDef::filename() const {
// @@protoc_insertion_point(field_get:tensorflow.AssetFileDef.filename)
return filename_.Get();
}
void AssetFileDef::set_filename(const ::std::string& value) {
filename_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value, GetArenaNoVirtual());
// @@protoc_insertion_point(field_set:tensorflow.AssetFileDef.filename)
}
void AssetFileDef::set_filename(const char* value) {
filename_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value),
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_char:tensorflow.AssetFileDef.filename)
}
void AssetFileDef::set_filename(const char* value,
size_t size) {
filename_.Set(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(
reinterpret_cast<const char*>(value), size), GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_pointer:tensorflow.AssetFileDef.filename)
}
::std::string* AssetFileDef::mutable_filename() {
// @@protoc_insertion_point(field_mutable:tensorflow.AssetFileDef.filename)
return filename_.Mutable(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* AssetFileDef::release_filename() {
// @@protoc_insertion_point(field_release:tensorflow.AssetFileDef.filename)
return filename_.Release(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual());
}
::std::string* AssetFileDef::unsafe_arena_release_filename() {
// @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.AssetFileDef.filename)
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
return filename_.UnsafeArenaRelease(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
GetArenaNoVirtual());
}
void AssetFileDef::set_allocated_filename(::std::string* filename) {
if (filename != NULL) {
} else {
}
filename_.SetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), filename,
GetArenaNoVirtual());
// @@protoc_insertion_point(field_set_allocated:tensorflow.AssetFileDef.filename)
}
void AssetFileDef::unsafe_arena_set_allocated_filename(
::std::string* filename) {
GOOGLE_DCHECK(GetArenaNoVirtual() != NULL);
if (filename != NULL) {
} else {
}
filename_.UnsafeArenaSetAllocated(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
filename, GetArenaNoVirtual());
// @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.AssetFileDef.filename)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// @@protoc_insertion_point(namespace_scope)
} // namespace tensorflow
// @@protoc_insertion_point(global_scope)<|fim▁end|> | // @@protoc_insertion_point(serialize_end:tensorflow.CollectionDef)
}
::google::protobuf::uint8* CollectionDef::InternalSerializeWithCachedSizesToArray( |
<|file_name|>postcss.config.js<|end_file_name|><|fim▁begin|>const {env, browsers} = require('config');
const isProd = env === 'production';
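// isProd gates production-only processing (e.g. minification) in the exported config.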
module.exports = {<|fim▁hole|> }
};<|fim▁end|> | // parser: 'sugarss',
plugins: {
'postcss-preset-env': browsers,
'cssnano': isProd ? {} : false, |
<|file_name|>ConstantFactory.js<|end_file_name|><|fim▁begin|>/**
* Created by BlueX on 6/11/16.
*/
angular
.module('TK-WEB-PITCH')
.factory('ConstantFactory', function(){
//var API_URL = 'http://127.0.0.1:8888/TK-API/';
var API_URL = 'http://api-test.tarangkhmer.com/';
<|fim▁hole|> // return $http.get('http://192.168.1.100:8888/TK-API/sportclubs.json', config);
return {
API_URL: API_URL
}
});<|fim▁end|> | |
<|file_name|>atari_breakout_dqn_cntk.py<|end_file_name|><|fim▁begin|>import random
import numpy as np
import math
from time import perf_counter
import os
import sys
from collections import deque
import gym
import cntk
from cntk.layers import Convolution, MaxPooling, Dense
from cntk.models import Sequential, LayerStack
from cntk.initializer import glorot_normal
env = gym.make("Breakout-v0")
NUM_ACTIONS = env.action_space.n
SCREEN_H_ORIG, SCREEN_W_ORIG, NUM_COLOUR_CHANNELS = env.observation_space.shape
def preprocess_image(screen_image):
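    # Crop, downsample, and grey-scale a raw Atari frame, returning an
    # array shaped (1, H, W) ready to be fed to the network.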
# crop the top and bottom
screen_image = screen_image[35:195]
# down sample by a factor of 2
screen_image = screen_image[::2, ::2]
# convert to grey scale
grey_image = np.zeros(screen_image.shape[0:2])
for i in range(len(screen_image)):
for j in range(len(screen_image[i])):
grey_image[i][j] = np.mean(screen_image[i][j])
return np.array([grey_image.astype(np.float)])
CHANNELS, IMAGE_H, IMAGE_W = preprocess_image(np.zeros((SCREEN_H_ORIG, SCREEN_W_ORIG))).shape
STATE_DIMS = (1, IMAGE_H, IMAGE_W)
class Brain:
BATCH_SIZE = 5
def __init__(self):
#### Construct the model ####
observation = cntk.ops.input_variable(STATE_DIMS, np.float32, name="s")
q_target = cntk.ops.input_variable(NUM_ACTIONS, np.float32, name="q")
# Define the structure of the neural network
self.model = self.create_convolutional_neural_network(observation, NUM_ACTIONS)
#### Define the trainer ####
self.learning_rate = cntk.learner.training_parameter_schedule(0.0001, cntk.UnitType.sample)
self.momentum = cntk.learner.momentum_as_time_constant_schedule(0.99)<|fim▁hole|> mean_error = cntk.ops.reduce_mean(cntk.ops.square(self.model - q_target), axis=0)
learner = cntk.adam_sgd(self.model.parameters, self.learning_rate, momentum=self.momentum)
self.trainer = cntk.Trainer(self.model, self.loss, mean_error, learner)
def train(self, x, y):
data = dict(zip(self.loss.arguments, [y, x]))
self.trainer.train_minibatch(data, outputs=[self.loss.output])
def predict(self, s):
return self.model.eval([s])
@staticmethod
def create_multi_layer_neural_network(input_vars, out_dims, num_hidden_layers):
num_hidden_neurons = 128
hidden_layer = lambda: Dense(num_hidden_neurons, activation=cntk.ops.relu)
output_layer = Dense(out_dims, activation=None)
model = Sequential([LayerStack(num_hidden_layers, hidden_layer),
output_layer])(input_vars)
return model
@staticmethod
def create_convolutional_neural_network(input_vars, out_dims):
convolutional_layer_1 = Convolution((5, 5), 32, strides=1, activation=cntk.ops.relu, pad=True,
init=glorot_normal(), init_bias=0.1)
pooling_layer_1 = MaxPooling((2, 2), strides=(2, 2), pad=True)
convolutional_layer_2 = Convolution((5, 5), 64, strides=1, activation=cntk.ops.relu, pad=True,
init=glorot_normal(), init_bias=0.1)
pooling_layer_2 = MaxPooling((2, 2), strides=(2, 2), pad=True)
convolutional_layer_3 = Convolution((5, 5), 128, strides=1, activation=cntk.ops.relu, pad=True,
init=glorot_normal(), init_bias=0.1)
pooling_layer_3 = MaxPooling((2, 2), strides=(2, 2), pad=True)
fully_connected_layer = Dense(1024, activation=cntk.ops.relu, init=glorot_normal(), init_bias=0.1)
output_layer = Dense(out_dims, activation=None, init=glorot_normal(), init_bias=0.1)
model = Sequential([convolutional_layer_1, pooling_layer_1,
convolutional_layer_2, pooling_layer_2,
#convolutional_layer_3, pooling_layer_3,
fully_connected_layer,
output_layer])(input_vars)
return model
class Memory:
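    # Fixed-capacity replay buffer of transitions, backed by a deque.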
def __init__(self, capacity):
self.examplers = deque(maxlen=capacity)
self.capacity = capacity
def add(self, sample):
self.examplers.append(sample)
def get_random_samples(self, num_samples):
num_samples = min(num_samples, len(self.examplers))
return random.sample(tuple(self.examplers), num_samples)
def get_stack(self, start_index, stack_size):
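        # Return stack_size consecutive frames starting near start_index,
        # stacked along the last axis; if the buffer holds fewer frames than
        # requested, the newest frame is repeated as padding.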
end_index = len(self.examplers) - stack_size
if end_index < 0:
stack = list(self.examplers) + [self.examplers[-1] for _ in range(-end_index)]
else:
start_index = min(start_index, end_index)
stack = [self.examplers[i + start_index] for i in range(stack_size)]
return np.stack(stack, axis=-1)
def get_random_stacks(self, num_samples, stack_size):
start_indices = random.sample(range(len(self.examplers)), num_samples)
return [self.get_stack(start_index, stack_size) for start_index in start_indices]
def get_latest_stack(self, stack_size):
return self.get_stack(len(self.examplers), stack_size)
class Agent:
MEMORY_CAPACITY = 100000
DISCOUNT_FACTOR = 0.99
MAX_EXPLORATION_RATE = 1.0
MIN_EXPLORATION_RATE = 0.01
DECAY_RATE = 0.0001
def __init__(self):
self.explore_rate = self.MAX_EXPLORATION_RATE
self.brain = Brain()
self.memory = Memory(self.MEMORY_CAPACITY)
self.steps = 0
def act(self, s):
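        # Epsilon-greedy policy: explore with probability explore_rate,
        # otherwise act greedily with respect to the current Q-network.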
if random.random() < self.explore_rate:
return random.randint(0, NUM_ACTIONS - 1)
else:
return np.argmax(self.brain.predict(s))
def observe(self, sample):
self.steps += 1
self.memory.add(sample)
        # Decay the exploration rate exponentially from MAX toward MIN
self.explore_rate = self.MIN_EXPLORATION_RATE + (self.MAX_EXPLORATION_RATE - self.MIN_EXPLORATION_RATE) * math.exp(-self.DECAY_RATE * self.steps)
def replay(self):
batch = self.memory.get_random_samples(self.brain.BATCH_SIZE)
batch_len = len(batch)
states = np.array([sample[0] for sample in batch], dtype=np.float32)
no_state = np.zeros(STATE_DIMS)
resultant_states = np.array([(no_state if sample[3] is None else sample[3]) for sample in batch], dtype=np.float32)
q_values_batch = self.brain.predict(states)
future_q_values_batch = self.brain.predict(resultant_states)
x = np.zeros((batch_len, ) + STATE_DIMS).astype(np.float32)
y = np.zeros((batch_len, NUM_ACTIONS)).astype(np.float32)
for i in range(batch_len):
state, action, reward, resultant_state = batch[i]
q_values = q_values_batch[0][i]
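            # Q-learning target: a terminal transition keeps the raw reward,
            # otherwise bootstrap with the discounted best future Q-value.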
if resultant_state is None:
q_values[action] = reward
else:
q_values[action] = reward + self.DISCOUNT_FACTOR * np.amax(future_q_values_batch[0][i])
x[i] = state
y[i] = q_values
self.brain.train(x, y)
@classmethod
def action_from_output(cls, output_array):
return np.argmax(output_array)
def run_simulation(agent, solved_reward_level):
state = env.reset()
state = preprocess_image(state)
total_rewards = 0
time_step = 0
while True:
#env.render()
time_step += 1
action = agent.act(state.astype(np.float32))
resultant_state, reward, done, info = env.step(action)
resultant_state = preprocess_image(resultant_state)
if done: # terminal state
resultant_state = None
agent.observe((state, action, reward, resultant_state))
agent.replay()
state = resultant_state
total_rewards += reward
if total_rewards > solved_reward_level or done:
return total_rewards, time_step
def test(model_path, num_episodes=10):
    root = cntk.load_model(model_path)
    for episode in range(num_episodes):
        observation = env.reset()  # reset environment for new episode
        done = False
        while not done:
            try:
                env.render()
            except Exception:
                # this might fail on a VM without OpenGL
                pass
            observation = preprocess_image(observation)
            action = np.argmax(root.eval(observation.astype(np.float32)))
            observation, reward, done, info = env.step(action)
if __name__ == "__main__":
    # Seed the RNG so runs are reproducible
np.random.seed(0)
GYM_ENABLE_UPLOAD = False
GYM_VIDEO_PATH = os.path.join(os.getcwd(), "videos", "atari_breakout_dpn_cntk")
GYM_API_KEY = "sk_93AMQvdmReWCi8pdL4m6Q"
MAX_NUM_EPISODES = 1000
STREAK_TO_END = 120
DONE_REWARD_LEVEL = 50
TRAINED_MODEL_DIR = os.path.join(os.getcwd(), "trained_models")
if not os.path.exists(TRAINED_MODEL_DIR):
os.makedirs(TRAINED_MODEL_DIR)
TRAINED_MODEL_NAME = "atari_breakout_dpn.mod"
EPISODES_PER_PRINT_PROGRESS = 1
EPISODES_PER_SAVE = 5
if len(sys.argv) < 2 or sys.argv[1] != "test_only":
if GYM_ENABLE_UPLOAD:
env.monitor.start(GYM_VIDEO_PATH, force=True)
agent = Agent()
episode_number = 0
num_streaks = 0
reward_sum = 0
time_step_sum = 0
solved_episode = -1
training_start_time = perf_counter()
while episode_number < MAX_NUM_EPISODES:
# Run the simulation and train the agent
reward, time_step = run_simulation(agent, DONE_REWARD_LEVEL*2)
reward_sum += reward
time_step_sum += time_step
episode_number += 1
if episode_number % EPISODES_PER_PRINT_PROGRESS == 0:
t = perf_counter() - training_start_time
print("(%d s) Episode: %d, Average reward = %.3f, Average number of time steps = %.3f."
% (t, episode_number, reward_sum / EPISODES_PER_PRINT_PROGRESS, time_step_sum/EPISODES_PER_PRINT_PROGRESS))
reward_sum = 0
time_step_sum = 0
            # An episode counts as solved when its total reward exceeds DONE_REWARD_LEVEL
if reward > DONE_REWARD_LEVEL:
num_streaks += 1
solved_episode = episode_number
else:
num_streaks = 0
solved_episode = -1
            # Training stops once episodes have been solved STREAK_TO_END times in a row
if num_streaks > STREAK_TO_END:
print("Task solved in %d episodes and repeated %d times." % (episode_number, num_streaks))
break
if episode_number % EPISODES_PER_SAVE == 0:
agent.brain.model.save_model(os.path.join(TRAINED_MODEL_DIR, TRAINED_MODEL_NAME), False)
agent.brain.model.save_model(os.path.join(TRAINED_MODEL_DIR, TRAINED_MODEL_NAME), False)
if GYM_ENABLE_UPLOAD:
env.monitor.close()
gym.upload(GYM_VIDEO_PATH, api_key=GYM_API_KEY)
# testing the model
test(os.path.join(TRAINED_MODEL_DIR, TRAINED_MODEL_NAME), num_episodes=10)<|fim▁end|> |
self.loss = cntk.ops.reduce_mean(cntk.ops.square(self.model - q_target), axis=0) |
<|file_name|>rgBuildSettingsViewVulkan.cpp<|end_file_name|><|fim▁begin|>// C++.
#include <cassert>
#include <sstream>
#include <algorithm>
// Qt.
#include <QWidget>
#include <QMessageBox>
#include <QDesktopWidget>
#include <QFileDialog>
// Infra.
#include <QtCommon/Util/RestoreCursorPosition.h>
#include <QtCommon/Scaling/ScalingManager.h>
#include <Utils/Include/rgaCliDefs.h>
// Local.
#include <RadeonGPUAnalyzerGUI/Include/Qt/rgBuildSettingsView.h>
#include <RadeonGPUAnalyzerGUI/Include/Qt/rgBuildSettingsViewVulkan.h>
#include <RadeonGPUAnalyzerGUI/Include/Qt/rgCheckBox.h>
#include <RadeonGPUAnalyzerGUI/Include/Qt/rgIncludeDirectoriesView.h>
#include <RadeonGPUAnalyzerGUI/Include/Qt/rgLineEdit.h>
#include <RadeonGPUAnalyzerGUI/Include/Qt/rgPreprocessorDirectivesDialog.h>
#include <RadeonGPUAnalyzerGUI/Include/rgCliUtils.h>
#include <RadeonGPUAnalyzerGUI/Include/rgConfigManager.h>
#include <RadeonGPUAnalyzerGUI/Include/rgStringConstants.h>
#include <RadeonGPUAnalyzerGUI/Include/rgUtils.h>
// Checkbox tool tip stylesheets.
static const char* s_STR_FILEMENU_TITLE_BAR_TOOLTIP_WIDTH = "min-width: %1px; width: %2px;";
static const char* s_STR_FILEMENU_TITLE_BAR_TOOLTIP_HEIGHT = "min-height: %1px; height: %2px; max-height: %3px;";
// The delimiter to use to join and split a string of option items.
static const char* s_OPTIONS_LIST_DELIMITER = ";";
rgBuildSettingsViewVulkan::rgBuildSettingsViewVulkan(QWidget* pParent, const rgBuildSettingsVulkan& buildSettings, bool isGlobalSettings) :
rgBuildSettingsView(pParent, isGlobalSettings),
m_initialSettings(buildSettings)
{
// Setup the UI.
ui.setupUi(this);
// Set the background to white.
QPalette pal = palette();
pal.setColor(QPalette::Background, Qt::white);
this->setAutoFillBackground(true);
this->setPalette(pal);
// Create the include directories editor view.
m_pIncludeDirectoriesView = new rgIncludeDirectoriesView(s_OPTIONS_LIST_DELIMITER, this);
// Create the preprocessor directives editor dialog.
m_pPreprocessorDirectivesDialog = new rgPreprocessorDirectivesDialog(s_OPTIONS_LIST_DELIMITER, this);
// Connect the signals.
ConnectSignals();
// Connect focus in/out events for all of the line edits.
ConnectLineEditFocusEvents();
// Connect clicked events for check boxes.
ConnectCheckBoxClickedEvents();
// Initialize the UI based on the incoming build settings.
PushToWidgets(buildSettings);
// Initialize the command line preview text.
UpdateCommandLineText();
// Set tooltips for general items.
ui.targetGPUsLabel->setToolTip(STR_BUILD_SETTINGS_TARGET_GPUS_TOOLTIP);
ui.predefinedMacrosLabel->setToolTip(STR_BUILD_SETTINGS_PREDEFINED_MACROS_TOOLTIP);
ui.includeDirectoriesLabel->setToolTip(STR_BUILD_SETTINGS_ADDITIONAL_INC_DIRECTORY_TOOLTIP);
// Set tooltip for Vulkan runtime section.
ui.vulkanOptionsHeaderLabel->setToolTip(STR_BUILD_SETTINGS_VULKAN_RUNTIME_TOOLTIP);
// Set tooltip for ICD location item.
ui.ICDLocationLabel->setToolTip(CLI_OPT_ICD_DESCRIPTION);
// Set tooltips for alternative compiler (glslang) component.
// Use the same tooltip for both the title and the item purposely.
ui.alternativeCompilerHeaderLabel->setToolTip(STR_BUILD_SETTINGS_ALTERNATIVE_COMPILER_GLSLANG_TOOLTIP);
ui.glslangOptionsLabel->setToolTip(CLI_OPT_GLSLANG_OPT_DESCRIPTION_A);
ui.compilerBinariesLabel->setToolTip(CLI_DESC_ALTERNATIVE_VK_BIN_FOLDER);
// Set tooltip for the General section.
ui.generalHeaderLabel->setToolTip(STR_BUILD_SETTINGS_GENERAL_TOOLTIP);
// Set the tooltip for the command line section of the build settings.
ui.settingsCommandLineHeaderLabel->setToolTip(STR_BUILD_SETTINGS_CMD_LINE_TOOLTIP);
// Set the mouse cursor to the pointing hand cursor for various widgets.
SetCursor();
// Set the event filter for "All Options" text edit.
ui.allOptionsTextEdit->installEventFilter(this);
// Hide HLSL options for now.
HideHLSLOptions();
}
void rgBuildSettingsViewVulkan::HideHLSLOptions()
{
// Hide HLSL options for now.
ui.vulkanOptionsHeaderLabel->hide();
ui.generateDebugInfoCheckBox->hide();
ui.generateDebugInfoLabel->hide();
ui.noExplicitBindingsCheckBox->hide();
ui.noExplicitBindingsLabel->hide();
ui.useHLSLBlockOffsetsCheckBox->hide();
ui.useHLSLBlockOffsetsLabel->hide();
ui.useHLSLIOMappingCheckBox->hide();
ui.useHLSLIOMappingLabel->hide();
}
void rgBuildSettingsViewVulkan::ConnectCheckBoxClickedEvents()
{
bool isConnected = false;
isConnected = connect(ui.generateDebugInfoCheckBox, &rgCheckBox::clicked, this, &rgBuildSettingsViewVulkan::HandleCheckBoxClickedEvent);
assert(isConnected);
isConnected = connect(ui.noExplicitBindingsCheckBox, &rgCheckBox::clicked, this, &rgBuildSettingsViewVulkan::HandleCheckBoxClickedEvent);
assert(isConnected);
isConnected = connect(ui.useHLSLBlockOffsetsCheckBox, &rgCheckBox::clicked, this, &rgBuildSettingsViewVulkan::HandleCheckBoxClickedEvent);
assert(isConnected);
isConnected = connect(ui.useHLSLIOMappingCheckBox, &rgCheckBox::clicked, this, &rgBuildSettingsViewVulkan::HandleCheckBoxClickedEvent);
assert(isConnected);
isConnected = connect(ui.enableValidationLayersCheckBox, &rgCheckBox::clicked, this, &rgBuildSettingsViewVulkan::HandleCheckBoxClickedEvent);
assert(isConnected);
}
// Make the UI reflect the values in the supplied settings struct.
void rgBuildSettingsViewVulkan::PushToWidgets(const rgBuildSettingsVulkan& settings)
{
// Disable any signals from the widgets while they're being populated.
QSignalBlocker signalBlockerTargetGPUsLineEdit(ui.targetGPUsLineEdit);
QSignalBlocker signalBlockerPredefinedMacrosLineEdit(ui.predefinedMacrosLineEdit);
QSignalBlocker signalBlockerIncludeDirectoriesLineEdit(ui.includeDirectoriesLineEdit);
QSignalBlocker signalBlockerGenerateDebugInfoCheckBox(ui.generateDebugInfoCheckBox);
QSignalBlocker signalBlockerNoExplicitBindingsCheckBox(ui.noExplicitBindingsCheckBox);
QSignalBlocker signalBlockerUseHLSLBlockOffsetsCheckBox(ui.useHLSLBlockOffsetsCheckBox);
QSignalBlocker signalBlockerUseHLSLIOMappingCheckBox(ui.useHLSLIOMappingCheckBox);
QSignalBlocker signalBlockerEnableValidationLayersCheckBox(ui.enableValidationLayersCheckBox);
QSignalBlocker signalBlockerICDLocationLineEdit(ui.ICDLocationLineEdit);
QSignalBlocker signalBlockerGlslangOptionsLineEdit(ui.glslangOptionsLineEdit);
QSignalBlocker signalBlockerCompilerBinariesLineEdit(ui.compilerBinariesLineEdit);
QSignalBlocker signalBlockerOutputFileBinaryNameLineEdit(ui.outputFileBinaryNameLineEdit);
// The items below are common build settings for all API types.
QString targetGpusList(rgUtils::BuildSemicolonSeparatedStringList(settings.m_targetGpus).c_str());
ui.targetGPUsLineEdit->setText(targetGpusList);
QString predefinedMacros(rgUtils::BuildSemicolonSeparatedStringList(settings.m_predefinedMacros).c_str());
ui.predefinedMacrosLineEdit->setText(predefinedMacros);
QString additionalIncludeDirs(rgUtils::BuildSemicolonSeparatedStringList(settings.m_additionalIncludeDirectories).c_str());
ui.includeDirectoriesLineEdit->setText(additionalIncludeDirs);
// Items below are Vulkan-specific build settings only.
ui.generateDebugInfoCheckBox->setChecked(settings.m_isGenerateDebugInfoChecked);
ui.noExplicitBindingsCheckBox->setChecked(settings.m_isNoExplicitBindingsChecked);
ui.useHLSLBlockOffsetsCheckBox->setChecked(settings.m_isUseHlslBlockOffsetsChecked);
ui.useHLSLIOMappingCheckBox->setChecked(settings.m_isUseHlslIoMappingChecked);
ui.enableValidationLayersCheckBox->setChecked(settings.m_isEnableValidationLayersChecked);
ui.ICDLocationLineEdit->setText(QString::fromStdString(settings.m_ICDLocation));
ui.glslangOptionsLineEdit->setText(QString::fromStdString(settings.m_glslangOptions));
ui.compilerBinariesLineEdit->setText(QString::fromStdString(std::get<CompilerFolderType::Bin>(settings.m_compilerPaths)));
// Output binary file name.
ui.outputFileBinaryNameLineEdit->setText(settings.m_binaryFileName.c_str());
}
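// Read the values currently shown in the UI back into a build settings struct.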
rgBuildSettingsVulkan rgBuildSettingsViewVulkan::PullFromWidgets() const
{
rgBuildSettingsVulkan settings;
// Target GPUs.
std::vector<std::string> targetGPUsVector;
const std::string& commaSeparatedTargetGPUs = ui.targetGPUsLineEdit->text().toStdString();
rgUtils::splitString(commaSeparatedTargetGPUs, rgConfigManager::RGA_LIST_DELIMITER, targetGPUsVector);
settings.m_targetGpus = targetGPUsVector;
// Predefined Macros.
std::vector<std::string> predefinedMacrosVector;
const std::string& commaSeparatedPredefinedMacros = ui.predefinedMacrosLineEdit->text().toStdString();
rgUtils::splitString(commaSeparatedPredefinedMacros, rgConfigManager::RGA_LIST_DELIMITER, predefinedMacrosVector);
settings.m_predefinedMacros = predefinedMacrosVector;
// Additional Include Directories.
std::vector<std::string> additionalIncludeDirectoriesVector;
const std::string& commaSeparatedAdditionalIncludeDirectories = ui.includeDirectoriesLineEdit->text().toStdString();
rgUtils::splitString(commaSeparatedAdditionalIncludeDirectories, rgConfigManager::RGA_LIST_DELIMITER, additionalIncludeDirectoriesVector);
settings.m_additionalIncludeDirectories = additionalIncludeDirectoriesVector;
// Vulkan-specific settings.
settings.m_isGenerateDebugInfoChecked = ui.generateDebugInfoCheckBox->isChecked();
settings.m_isNoExplicitBindingsChecked = ui.noExplicitBindingsCheckBox->isChecked();
settings.m_isUseHlslBlockOffsetsChecked = ui.useHLSLBlockOffsetsCheckBox->isChecked();
settings.m_isUseHlslIoMappingChecked = ui.useHLSLIOMappingCheckBox->isChecked();
settings.m_isEnableValidationLayersChecked = ui.enableValidationLayersCheckBox->isChecked();
settings.m_ICDLocation = ui.ICDLocationLineEdit->text().toStdString();
settings.m_glslangOptions = ui.glslangOptionsLineEdit->text().toStdString();
std::get<CompilerFolderType::Bin>(settings.m_compilerPaths) = ui.compilerBinariesLineEdit->text().toStdString();
// Binary output file name.
settings.m_binaryFileName = ui.outputFileBinaryNameLineEdit->text().toStdString();
return settings;
}
void rgBuildSettingsViewVulkan::ConnectSignals()
{
// Add target GPU button.
bool isConnected = connect(this->ui.addTargetGPUsButton, &QPushButton::clicked, this, &rgBuildSettingsViewVulkan::HandleAddTargetGpusButtonClick);
assert(isConnected);
// Add include directories editor dialog button.
isConnected = connect(this->ui.includeDirsBrowseButton, &QPushButton::clicked, this, &rgBuildSettingsViewVulkan::HandleIncludeDirsBrowseButtonClick);
assert(isConnected);
// Add preprocessor directives editor dialog button.
isConnected = connect(this->ui.predefinedMacrosBrowseButton, &QPushButton::clicked, this, &rgBuildSettingsViewVulkan::HandlePreprocessorDirectivesBrowseButtonClick);
assert(isConnected);
// Connect all textboxes within the view.
isConnected = connect(this->ui.targetGPUsLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleTextEditChanged);
assert(isConnected);
// Handle changes to the Predefined Macros setting.
isConnected = connect(this->ui.predefinedMacrosLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleTextEditChanged);
assert(isConnected);
// Handle changes to the Include Directories setting.
isConnected = connect(this->ui.includeDirectoriesLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleTextEditChanged);
assert(isConnected);
// Connect the include directory editor dialog's "OK" button click.
isConnected = connect(m_pIncludeDirectoriesView, &rgIncludeDirectoriesView::OKButtonClicked, this, &rgBuildSettingsViewVulkan::HandleIncludeDirsUpdated);
assert(isConnected);
// Connect the preprocessor directives editor dialog's "OK" button click.
isConnected = connect(m_pPreprocessorDirectivesDialog, &rgPreprocessorDirectivesDialog::OKButtonClicked, this, &rgBuildSettingsViewVulkan::HandlePreprocessorDirectivesUpdated);
assert(isConnected);
// Handle changes to the Generate Debug Info checkbox.
isConnected = connect(this->ui.generateDebugInfoCheckBox, &rgCheckBox::stateChanged, this, &rgBuildSettingsViewVulkan::HandleCheckboxStateChanged);
assert(isConnected);
// Handle changes to the No Explicit Bindings checkbox.
isConnected = connect(this->ui.noExplicitBindingsCheckBox, &rgCheckBox::stateChanged, this, &rgBuildSettingsViewVulkan::HandleCheckboxStateChanged);
assert(isConnected);
// Handle changes to the Use HLSL Block Offsets checkbox.
isConnected = connect(this->ui.useHLSLBlockOffsetsCheckBox, &rgCheckBox::stateChanged, this, &rgBuildSettingsViewVulkan::HandleCheckboxStateChanged);
assert(isConnected);
// Handle changes to the Use HLSL IO Mapping checkbox.
isConnected = connect(this->ui.useHLSLIOMappingCheckBox, &rgCheckBox::stateChanged, this, &rgBuildSettingsViewVulkan::HandleCheckboxStateChanged);
assert(isConnected);
// Handle changes to the Enable Validation Layers checkbox.
isConnected = connect(this->ui.enableValidationLayersCheckBox, &rgCheckBox::stateChanged, this, &rgBuildSettingsViewVulkan::HandleCheckboxStateChanged);
assert(isConnected);
// Handle clicking of ICD location browse button.
isConnected = connect(this->ui.ICDLocationBrowseButton, &QPushButton::clicked, this, &rgBuildSettingsViewVulkan::HandleICDLocationBrowseButtonClick);
assert(isConnected);
// Handle changes to the ICD location line edit.
isConnected = connect(this->ui.ICDLocationLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleICDLocationLineEditChanged);
assert(isConnected);
// Handle changes to the glslang options line edit.
isConnected = connect(this->ui.glslangOptionsLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleGlslangOptionsLineEditChanged);
assert(isConnected);
// Handle clicking of the Alternative Compiler path browse button.
isConnected = connect(this->ui.compilerBrowseButton, &QPushButton::clicked, this, &rgBuildSettingsViewVulkan::HandleAlternativeCompilerBrowseButtonClicked);
assert(isConnected);
// Handle changes to the Alternative Compiler path line edit.
isConnected = connect(this->ui.compilerBinariesLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleAlternativeCompilerLineEditChanged);
assert(isConnected);
// Binary Output File name textChanged signal.
isConnected = connect(this->ui.outputFileBinaryNameLineEdit, &QLineEdit::textChanged, this, &rgBuildSettingsViewVulkan::HandleOutputBinaryEditBoxChanged);
assert(isConnected);
// Binary Output File name editingFinished signal.
isConnected = connect(this->ui.outputFileBinaryNameLineEdit, &QLineEdit::editingFinished, this, &rgBuildSettingsViewVulkan::HandleOutputBinaryFileEditingFinished);
assert(isConnected);
}
void rgBuildSettingsViewVulkan::HandleOutputBinaryFileEditingFinished()
{
// Verify that the output binary file text is not empty before losing the focus.
if (this->ui.outputFileBinaryNameLineEdit->text().trimmed().isEmpty() || !rgUtils::IsValidFileName(ui.outputFileBinaryNameLineEdit->text().toStdString()))
{
// Initialize the binary output file name edit line.
std::shared_ptr<rgBuildSettings> pDefaultSettings = rgConfigManager::Instance().GetUserGlobalBuildSettings(rgProjectAPI::Vulkan);
auto pVkDefaultSettings = std::dynamic_pointer_cast<rgBuildSettingsVulkan>(pDefaultSettings);
assert(pVkDefaultSettings != nullptr);
if (pVkDefaultSettings != nullptr)
{
this->ui.outputFileBinaryNameLineEdit->setText(pVkDefaultSettings->m_binaryFileName.c_str());
}
this->ui.outputFileBinaryNameLineEdit->setFocus();
}
// Signal to any listeners that the values in the UI have changed.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
void rgBuildSettingsViewVulkan::HandleOutputBinaryEditBoxChanged(const QString& text)
{
// Update the tooltip.
ui.outputFileBinaryNameLineEdit->setToolTip(text);
// Restore the cursor to the original position when the text has changed.
QtCommon::QtUtil::RestoreCursorPosition cursorPosition(ui.outputFileBinaryNameLineEdit);
// Signal to any listeners that the values in the UI have changed.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
void rgBuildSettingsViewVulkan::HandleICDLocationBrowseButtonClick(bool /* checked */)
{
QString selectedFile = QFileDialog::getOpenFileName(this, tr(STR_ICD_LOCATION_DIALOG_SELECT_FILE_TITLE),
ui.ICDLocationLineEdit->text(),
tr(STR_DIALOG_FILTER_ICD));
if (!selectedFile.isEmpty())
{
ui.ICDLocationLineEdit->setText(selectedFile);
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
}
void rgBuildSettingsViewVulkan::HandleICDLocationLineEditChanged(const QString& text)
{
// Restore the cursor to the original position when the text has changed.
QtCommon::QtUtil::RestoreCursorPosition cursorPosition(ui.ICDLocationLineEdit);
// Set the text value.
ui.ICDLocationLineEdit->setText(text);
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
void rgBuildSettingsViewVulkan::HandleGlslangOptionsLineEditChanged(const QString& text)
{
// Restore the cursor to the original position when the text has changed.
QtCommon::QtUtil::RestoreCursorPosition cursorPosition(ui.glslangOptionsLineEdit);
// Set the text value.
ui.glslangOptionsLineEdit->setText(text);
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
void rgBuildSettingsViewVulkan::HandleAlternativeCompilerBrowseButtonClicked()
{
QFileDialog fileDialog;
QLineEdit* pLineEdit = ui.compilerBinariesLineEdit;
assert(pLineEdit != nullptr);
if (pLineEdit != nullptr)
{
QString currentDir = (pLineEdit->text().isEmpty() ?
QString::fromStdString(rgConfigManager::Instance().GetLastSelectedFolder()) : pLineEdit->text());
QString selectedDirectory = QFileDialog::getExistingDirectory(this, tr(STR_INCLUDE_DIR_DIALOG_SELECT_DIR_TITLE),
currentDir, QFileDialog::ShowDirsOnly);
if (!selectedDirectory.isEmpty())
{<|fim▁hole|> // Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
}
}
void rgBuildSettingsViewVulkan::HandleAlternativeCompilerLineEditChanged(const QString& text)
{
// Restore the cursor to the original position when the text has changed.
QtCommon::QtUtil::RestoreCursorPosition cursorPosition(ui.compilerBinariesLineEdit);
// Set the text value.
ui.compilerBinariesLineEdit->setText(text);
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
void rgBuildSettingsViewVulkan::ConnectLineEditFocusEvents()
{
bool isConnected = false;
isConnected = connect(this->ui.includeDirectoriesLineEdit, &rgLineEdit::LineEditFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.includeDirectoriesLineEdit, &rgLineEdit::LineEditFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.predefinedMacrosLineEdit, &rgLineEdit::LineEditFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.predefinedMacrosLineEdit, &rgLineEdit::LineEditFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.targetGPUsLineEdit, &rgLineEdit::LineEditFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.targetGPUsLineEdit, &rgLineEdit::LineEditFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.ICDLocationLineEdit, &rgLineEdit::LineEditFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.ICDLocationLineEdit, &rgLineEdit::LineEditFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.glslangOptionsLineEdit, &rgLineEdit::LineEditFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.glslangOptionsLineEdit, &rgLineEdit::LineEditFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.compilerBinariesLineEdit, &rgLineEdit::LineEditFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.compilerBinariesLineEdit, &rgLineEdit::LineEditFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.addTargetGPUsButton, &rgBrowseButton::BrowseButtonFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.addTargetGPUsButton, &rgBrowseButton::BrowseButtonFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.predefinedMacrosBrowseButton, &rgBrowseButton::BrowseButtonFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.predefinedMacrosBrowseButton, &rgBrowseButton::BrowseButtonFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.includeDirsBrowseButton, &rgBrowseButton::BrowseButtonFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.includeDirsBrowseButton, &rgBrowseButton::BrowseButtonFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.ICDLocationBrowseButton, &rgBrowseButton::BrowseButtonFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.ICDLocationBrowseButton, &rgBrowseButton::BrowseButtonFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.compilerBrowseButton, &rgBrowseButton::BrowseButtonFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.compilerBrowseButton, &rgBrowseButton::BrowseButtonFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleBrowseButtonFocusOutEvent);
assert(isConnected);
isConnected = connect(this->ui.enableValidationLayersCheckBox, &rgCheckBox::CheckBoxFocusInEvent, this, &rgBuildSettingsViewVulkan::HandleCheckBoxFocusInEvent);
assert(isConnected);
isConnected = connect(this->ui.enableValidationLayersCheckBox, &rgCheckBox::CheckBoxFocusOutEvent, this, &rgBuildSettingsViewVulkan::HandleCheckBoxFocusOutEvent);
assert(isConnected);
}
void rgBuildSettingsViewVulkan::HandleAddTargetGpusButtonClick()
{
// Set the frame border color to red.
emit SetFrameBorderRedSignal();
// Trim out any spaces within the target GPUs string.
QString selectedGPUs = this->ui.targetGPUsLineEdit->text();
// Create a new Target GPU Selection dialog instance.
m_pTargetGpusDialog = new rgTargetGpusDialog(selectedGPUs, this);
// Register the target gpu dialog box with the scaling manager.
ScalingManager::Get().RegisterObject(m_pTargetGpusDialog);
// Center the dialog on the view (registering with the scaling manager
// shifts it out of the center so we need to manually center it).
rgUtils::CenterOnWidget(m_pTargetGpusDialog, this);
// Present the dialog to the user.
m_pTargetGpusDialog->setModal(true);
int dialogResult = m_pTargetGpusDialog->exec();
// If the user clicked "OK", extract the Checked items into a comma-separated string,
// and put the string in the Target GPUs textbox.
if (dialogResult == 1)
{
// After the dialog is hidden, extract the list of families selected by the user.
std::vector<std::string> selectedFamilies = m_pTargetGpusDialog->GetSelectedCapabilityGroups();
// Remove gfx notation if needed.
std::transform(selectedFamilies.begin(), selectedFamilies.end(), selectedFamilies.begin(),
[&](std::string& family)
{
return rgUtils::RemoveGfxNotation(family);
});
// Create a comma-separated list of GPU families that the user selected.
std::stringstream familyListString;
size_t numFamilies = selectedFamilies.size();
for (size_t familyIndex = 0; familyIndex < numFamilies; ++familyIndex)
{
// Append the family name to the string.
familyListString << selectedFamilies[familyIndex];
// Append a comma between each family name, until the last one.
if ((familyIndex + 1) < numFamilies)
{
familyListString << rgConfigManager::RGA_LIST_DELIMITER;
}
}
// Set the target GPUs text.
ui.targetGPUsLineEdit->setText(familyListString.str().c_str());
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
}
}
void rgBuildSettingsViewVulkan::HandleTextEditChanged()
{
// Determine which control's text has been updated.
    QLineEdit* pLineEdit = qobject_cast<QLineEdit*>(QObject::sender());
assert(pLineEdit != nullptr);
if (pLineEdit != nullptr)
{
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
}
std::string rgBuildSettingsViewVulkan::GetTitleString()
{
std::stringstream titleString;
// Build the title string.
if (m_isGlobalSettings)
{
// For the global settings.
titleString << STR_BUILD_SETTINGS_DEFAULT_TITLE;
titleString << " ";
titleString << STR_API_NAME_VULKAN;
titleString << " ";
}
else
{
// For project-specific settings.
titleString << STR_BUILD_SETTINGS_PROJECT_TITLE;
titleString << " ";
}
titleString << STR_MENU_BUILD_SETTINGS_LOWER;
return titleString.str();
}
const std::string rgBuildSettingsViewVulkan::GetTitleTooltipString() const
{
std::stringstream tooltipString;
if (m_isGlobalSettings)
{
tooltipString << STR_BUILD_SETTINGS_GLOBAL_TOOLTIP_A;
tooltipString << STR_API_NAME_VULKAN;
tooltipString << STR_BUILD_SETTINGS_GLOBAL_TOOLTIP_B;
}
else
{
tooltipString << STR_BUILD_SETTINGS_PROJECT_TOOLTIP_A;
tooltipString << STR_API_NAME_VULKAN;
tooltipString << STR_BUILD_SETTINGS_PROJECT_TOOLTIP_B;
}
return tooltipString.str();
}
void rgBuildSettingsViewVulkan::UpdateCommandLineText()
{
rgBuildSettingsVulkan apiBuildSetting = PullFromWidgets();
// Generate a command line string from the build settings structure.
std::string buildSettings;
bool ret = rgCliUtils::GenerateVulkanBuildSettingsString(apiBuildSetting, buildSettings);
assert(ret);
if (ret)
{
ui.allOptionsTextEdit->setPlainText(buildSettings.c_str());
}
}
void rgBuildSettingsViewVulkan::HandlePendingChangesStateChanged(bool hasPendingChanges)
{
// Let the base class determine if there is a need to signal listeners
// about the pending changes state.
rgBuildSettingsView::SetHasPendingChanges(hasPendingChanges);
}
bool rgBuildSettingsViewVulkan::IsTargetGpusStringValid(std::vector<std::string>& errors) const
{
bool isValid = true;
// The target GPUs string must be non-empty.
std::string targetGpusString = ui.targetGPUsLineEdit->text().toStdString();
if (targetGpusString.empty())
{
// The Target GPUs field is invalid since it is empty.
errors.push_back(STR_ERR_TARGET_GPUS_CANNOT_BE_EMPTY);
isValid = false;
}
else
{
// Split the comma-separated GPUs string.
std::vector<std::string> targetGPUsVector;
rgUtils::splitString(targetGpusString, rgConfigManager::RGA_LIST_DELIMITER, targetGPUsVector);
// Use the Config Manager to verify that the specified GPUs are valid.
std::vector<std::string> invalidGpus;
rgConfigManager& configManager = rgConfigManager::Instance();
for (const std::string& targetGpuFamilyName : targetGPUsVector)
{
std::string trimmedName;
rgUtils::TrimLeadingAndTrailingWhitespace(targetGpuFamilyName, trimmedName);
// Is the given target GPU family name supported?
if (!configManager.IsGpuFamilySupported(trimmedName))
{
// Add the GPU to a list of invalid names if it's not supported.
invalidGpus.push_back(trimmedName);
}
}
if (!invalidGpus.empty())
{
// Build an error string indicating the invalid GPUs.
std::stringstream errorStream;
errorStream << STR_ERR_INVALID_GPUS_SPECIFIED;
int numInvalid = static_cast<int>(invalidGpus.size());
for (int gpuIndex = 0; gpuIndex < numInvalid; ++gpuIndex)
{
errorStream << invalidGpus[gpuIndex];
if (gpuIndex != (numInvalid - 1))
{
errorStream << ", ";
}
}
// Add an error indicating that the GPU name is invalid.
errors.push_back(errorStream.str());
isValid = false;
}
}
return isValid;
}
bool rgBuildSettingsViewVulkan::ValidatePendingSettings()
{
std::vector<std::string> errorFields;
bool isValid = IsTargetGpusStringValid(errorFields);
if (!isValid)
{
std::stringstream errorStream;
errorStream << STR_ERR_INVALID_PENDING_SETTING;
errorStream << std::endl;
// Loop through all errors and append them to the stream.
        for (const std::string& error : errorFields)
{
errorStream << error;
errorStream << std::endl;
}
// Display an error message box to the user telling them what to fix.
rgUtils::ShowErrorMessageBox(errorStream.str().c_str(), this);
}
return isValid;
}
void rgBuildSettingsViewVulkan::SetCursor()
{
// Set the cursor for buttons to pointing hand cursor.
ui.addTargetGPUsButton->setCursor(Qt::PointingHandCursor);
ui.includeDirsBrowseButton->setCursor(Qt::PointingHandCursor);
ui.predefinedMacrosBrowseButton->setCursor(Qt::PointingHandCursor);
ui.ICDLocationBrowseButton->setCursor(Qt::PointingHandCursor);
ui.compilerBrowseButton->setCursor(Qt::PointingHandCursor);
// Set the cursor for check boxes to pointing hand cursor.
ui.generateDebugInfoCheckBox->setCursor(Qt::PointingHandCursor);
ui.noExplicitBindingsCheckBox->setCursor(Qt::PointingHandCursor);
ui.useHLSLBlockOffsetsCheckBox->setCursor(Qt::PointingHandCursor);
ui.useHLSLIOMappingCheckBox->setCursor(Qt::PointingHandCursor);
ui.enableValidationLayersCheckBox->setCursor(Qt::PointingHandCursor);
}
void rgBuildSettingsViewVulkan::HandleIncludeDirsBrowseButtonClick()
{
// Set the frame border color to red.
emit SetFrameBorderRedSignal();
// Position the window in the middle of the screen.
m_pIncludeDirectoriesView->setGeometry(QStyle::alignedRect(Qt::LeftToRight, Qt::AlignCenter, m_pIncludeDirectoriesView->size(), qApp->desktop()->availableGeometry()));
// Set the current include dirs.
m_pIncludeDirectoriesView->SetListItems(ui.includeDirectoriesLineEdit->text());
// Show the window.
m_pIncludeDirectoriesView->exec();
}
void rgBuildSettingsViewVulkan::HandleIncludeDirsUpdated(QStringList includeDirs)
{
QString includeDirsText;
// Create a delimiter-separated string.
if (!includeDirs.isEmpty())
{
includeDirsText = includeDirs.join(s_OPTIONS_LIST_DELIMITER);
}
// Update the text box.
ui.includeDirectoriesLineEdit->setText(includeDirsText);
// Inform the rest of the UI that the settings have been changed.
HandlePendingChangesStateChanged(GetHasPendingChanges());
}
void rgBuildSettingsViewVulkan::HandlePreprocessorDirectivesBrowseButtonClick()
{
// Set the frame border color to red.
emit SetFrameBorderRedSignal();
// Position the window in the middle of the screen.
m_pPreprocessorDirectivesDialog->setGeometry(QStyle::alignedRect(Qt::LeftToRight, Qt::AlignCenter, m_pPreprocessorDirectivesDialog->size(), qApp->desktop()->availableGeometry()));
// Set the current preprocessor directives in the dialog.
m_pPreprocessorDirectivesDialog->SetListItems(ui.predefinedMacrosLineEdit->text());
// Show the dialog.
m_pPreprocessorDirectivesDialog->exec();
}
void rgBuildSettingsViewVulkan::HandlePreprocessorDirectivesUpdated(QStringList preprocessorDirectives)
{
QString preprocessorDirectivesText;
// Create a delimiter-separated string.
if (!preprocessorDirectives.isEmpty())
{
preprocessorDirectivesText = preprocessorDirectives.join(s_OPTIONS_LIST_DELIMITER);
}
// Update the text box.
ui.predefinedMacrosLineEdit->setText(preprocessorDirectivesText);
// Inform the rest of the UI that the settings have been changed.
HandlePendingChangesStateChanged(GetHasPendingChanges());
}
void rgBuildSettingsViewVulkan::HandleLineEditFocusInEvent()
{
emit SetFrameBorderRedSignal();
}
void rgBuildSettingsViewVulkan::HandleLineEditFocusOutEvent()
{
emit SetFrameBorderBlackSignal();
}
void rgBuildSettingsViewVulkan::HandleBrowseButtonFocusInEvent()
{
emit SetFrameBorderRedSignal();
}
void rgBuildSettingsViewVulkan::HandleBrowseButtonFocusOutEvent()
{
emit SetFrameBorderBlackSignal();
}
void rgBuildSettingsViewVulkan::HandleCheckBoxFocusInEvent()
{
emit SetFrameBorderRedSignal();
}
void rgBuildSettingsViewVulkan::HandleCheckBoxFocusOutEvent()
{
emit SetFrameBorderBlackSignal();
}
void rgBuildSettingsViewVulkan::HandleCheckBoxClickedEvent()
{
emit SetFrameBorderRedSignal();
}
void rgBuildSettingsViewVulkan::HandleCheckboxStateChanged()
{
// Make sure it was a checkbox that caused this state change (just to be sure).
    QCheckBox* pCheckBox = qobject_cast<QCheckBox*>(QObject::sender());
assert(pCheckBox != nullptr);
if (pCheckBox != nullptr)
{
// Inform the UI of a possible change to the pending state.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Update the command line preview text.
UpdateCommandLineText();
}
}
bool rgBuildSettingsViewVulkan::GetHasPendingChanges() const
{
rgBuildSettingsVulkan currentSettings = PullFromWidgets();
bool hasChanges = !m_initialSettings.HasSameSettings(currentSettings);
return hasChanges;
}
bool rgBuildSettingsViewVulkan::RevertPendingChanges()
{
PushToWidgets(m_initialSettings);
// Make sure the rest of the UI knows that the settings don't need to be saved.
HandlePendingChangesStateChanged(false);
return false;
}
void rgBuildSettingsViewVulkan::RestoreDefaultSettings()
{
bool isRestored = false;
if (m_isGlobalSettings)
{
// If this is for the global settings, then restore to the hard-coded defaults.
// Get the hardcoded default build settings.
std::shared_ptr<rgBuildSettings> pDefaultBuildSettings = rgConfigManager::GetDefaultBuildSettings(rgProjectAPI::Vulkan);
std::shared_ptr<rgBuildSettingsVulkan> pApiBuildSettings = std::dynamic_pointer_cast<rgBuildSettingsVulkan>(pDefaultBuildSettings);
assert(pApiBuildSettings != nullptr);
if (pApiBuildSettings != nullptr)
{
// Reset our initial settings back to the defaults.
m_initialSettings = *pApiBuildSettings;
// Update the UI to reflect the new initial settings.
PushToWidgets(m_initialSettings);
// Update the ConfigManager to use the new settings.
rgConfigManager::Instance().SetApiBuildSettings(STR_API_NAME_VULKAN, &m_initialSettings);
// Save the settings file.
isRestored = rgConfigManager::Instance().SaveGlobalConfigFile();
// Inform the rest of the UI that the settings have been changed.
HandlePendingChangesStateChanged(GetHasPendingChanges());
}
}
else
{
// This view is showing project-specific settings, so restore back to the stored settings in the project.
std::shared_ptr<rgBuildSettings> pDefaultSettings = rgConfigManager::Instance().GetUserGlobalBuildSettings(rgProjectAPI::Vulkan);
auto pVkDefaultSettings = std::dynamic_pointer_cast<rgBuildSettingsVulkan>(pDefaultSettings);
m_initialSettings = *pVkDefaultSettings;
PushToWidgets(m_initialSettings);
// Inform the rest of the UI that the settings have been changed.
HandlePendingChangesStateChanged(GetHasPendingChanges());
// Let the rgBuildView know that the build settings have been updated.
emit ProjectBuildSettingsSaved(pVkDefaultSettings);
isRestored = true;
}
// Show an error dialog if the settings failed to be reset.
if (!isRestored)
{
rgUtils::ShowErrorMessageBox(STR_ERR_CANNOT_RESTORE_DEFAULT_SETTINGS, this);
}
}
bool rgBuildSettingsViewVulkan::SaveSettings()
{
bool canBeSaved = ValidatePendingSettings();
if (canBeSaved)
{
// Reset the initial settings to match what the UI shows.
m_initialSettings = PullFromWidgets();
if (m_isGlobalSettings)
{
// Update the config manager to use these new settings.
rgConfigManager& configManager = rgConfigManager::Instance();
configManager.SetApiBuildSettings(STR_API_NAME_VULKAN, &m_initialSettings);
// Save the global config settings.
canBeSaved = configManager.SaveGlobalConfigFile();
}
else
{
// Save the project settings.
std::shared_ptr<rgBuildSettingsVulkan> pTmpPtr = std::make_shared<rgBuildSettingsVulkan>(m_initialSettings);
emit ProjectBuildSettingsSaved(pTmpPtr);
}
if (canBeSaved)
{
// Make sure the rest of the UI knows that the settings have been saved.
HandlePendingChangesStateChanged(false);
}
}
// Set focus to target GPUs browse button.
ui.addTargetGPUsButton->setFocus();
return canBeSaved;
}
void rgBuildSettingsViewVulkan::mousePressEvent(QMouseEvent* pEvent)
{
Q_UNUSED(pEvent);
emit SetFrameBorderRedSignal();
}
bool rgBuildSettingsViewVulkan::eventFilter(QObject* pObject, QEvent* pEvent)
{
// Intercept events for "All Options" widget.
if (pEvent != nullptr)
{
if (pEvent->type() == QEvent::FocusIn)
{
HandleLineEditFocusInEvent();
}
else if (pEvent->type() == QEvent::FocusOut)
{
HandleLineEditFocusOutEvent();
}
}
// Continue default processing.
return QObject::eventFilter(pObject, pEvent);
}
void rgBuildSettingsViewVulkan::SetInitialWidgetFocus()
{
ui.addTargetGPUsButton->setFocus();
}<|fim▁end|> | ui.compilerBinariesLineEdit->setText(selectedDirectory);
|
<|file_name|>security_group_test.go<|end_file_name|><|fim▁begin|>package securitygroup_test
import (
"code.cloudfoundry.org/cli/cf/api/securitygroups/securitygroupsfakes"
"code.cloudfoundry.org/cli/cf/configuration/coreconfig"
"code.cloudfoundry.org/cli/cf/errors"
"code.cloudfoundry.org/cli/cf/models"
"code.cloudfoundry.org/cli/cf/requirements"
"code.cloudfoundry.org/cli/cf/requirements/requirementsfakes"
testcmd "code.cloudfoundry.org/cli/util/testhelpers/commands"
testconfig "code.cloudfoundry.org/cli/util/testhelpers/configuration"
testterm "code.cloudfoundry.org/cli/util/testhelpers/terminal"
"code.cloudfoundry.org/cli/cf/commandregistry"
. "code.cloudfoundry.org/cli/util/testhelpers/matchers"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("security-group command", func() {
var (
ui *testterm.FakeUI
securityGroupRepo *securitygroupsfakes.FakeSecurityGroupRepo
requirementsFactory *requirementsfakes.FakeFactory
configRepo coreconfig.Repository
deps commandregistry.Dependency
)
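	// Wire the fakes above into the command's dependencies before it runs.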
updateCommandDependency := func(pluginCall bool) {
deps.UI = ui
deps.RepoLocator = deps.RepoLocator.SetSecurityGroupRepository(securityGroupRepo)
deps.Config = configRepo
commandregistry.Commands.SetCommand(commandregistry.Commands.FindCommand("security-group").SetDependency(deps, pluginCall))
}
BeforeEach(func() {
ui = &testterm.FakeUI{}
requirementsFactory = new(requirementsfakes.FakeFactory)
securityGroupRepo = new(securitygroupsfakes.FakeSecurityGroupRepo)
configRepo = testconfig.NewRepositoryWithDefaults()
})
runCommand := func(args ...string) bool {
return testcmd.RunCLICommand("security-group", args, requirementsFactory, updateCommandDependency, false, ui)
}
Describe("requirements", func() {
It("should fail if not logged in", func() {
requirementsFactory.NewLoginRequirementReturns(requirements.Failing{Message: "not logged in"})
Expect(runCommand("my-group")).To(BeFalse())
})
It("should fail with usage when not provided a single argument", func() {
requirementsFactory.NewLoginRequirementReturns(requirements.Passing{})
runCommand("whoops", "I can't believe", "I accidentally", "the whole thing")
Expect(ui.Outputs()).To(ContainSubstrings(
[]string{"Incorrect Usage", "Requires an argument"},
))
})
})
Context("when logged in", func() {
BeforeEach(func() {
requirementsFactory.NewLoginRequirementReturns(requirements.Passing{})
})<|fim▁hole|>
Context("when the group with the given name exists", func() {
BeforeEach(func() {
rulesMap := []map[string]interface{}{{"just-pretend": "that-this-is-correct"}}
securityGroup := models.SecurityGroup{
SecurityGroupFields: models.SecurityGroupFields{
Name: "my-group",
GUID: "group-guid",
Rules: rulesMap,
},
Spaces: []models.Space{
{
SpaceFields: models.SpaceFields{GUID: "my-space-guid-1", Name: "space-1"},
Organization: models.OrganizationFields{GUID: "my-org-guid-1", Name: "org-1"},
},
{
SpaceFields: models.SpaceFields{GUID: "my-space-guid", Name: "space-2"},
Organization: models.OrganizationFields{GUID: "my-org-guid-1", Name: "org-2"},
},
},
}
securityGroupRepo.ReadReturns(securityGroup, nil)
})
It("should fetch the security group from its repo", func() {
runCommand("my-group")
Expect(securityGroupRepo.ReadArgsForCall(0)).To(Equal("my-group"))
})
It("tells the user what it's about to do and then shows the group", func() {
runCommand("my-group")
Expect(ui.Outputs()).To(ContainSubstrings(
[]string{"Getting", "security group", "my-group", "my-user"},
[]string{"OK"},
[]string{"Name", "my-group"},
[]string{"Rules"},
[]string{"["},
[]string{"{"},
[]string{"just-pretend", "that-this-is-correct"},
[]string{"}"},
[]string{"]"},
[]string{"#0", "org-1", "space-1"},
[]string{"#1", "org-2", "space-2"},
))
})
It("tells the user if no spaces are assigned", func() {
securityGroup := models.SecurityGroup{
SecurityGroupFields: models.SecurityGroupFields{
Name: "my-group",
GUID: "group-guid",
Rules: []map[string]interface{}{},
},
Spaces: []models.Space{},
}
securityGroupRepo.ReadReturns(securityGroup, nil)
runCommand("my-group")
Expect(ui.Outputs()).To(ContainSubstrings(
[]string{"No spaces assigned"},
))
})
})
It("fails and warns the user if a group with that name could not be found", func() {
securityGroupRepo.ReadReturns(models.SecurityGroup{}, errors.New("half-past-tea-time"))
runCommand("im-late!")
Expect(ui.Outputs()).To(ContainSubstrings([]string{"FAILED"}))
})
})
})<|fim▁end|> | |
<|file_name|>tcp_client_socket_win.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/socket/tcp_client_socket_win.h"
#include <mstcpip.h>
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "base/metrics/stats_counters.h"
#include "base/string_util.h"
#include "base/win/object_watcher.h"
#include "base/win/windows_version.h"
#include "net/base/connection_type_histograms.h"
#include "net/base/io_buffer.h"
#include "net/base/ip_endpoint.h"
#include "net/base/net_errors.h"
#include "net/base/net_log.h"
#include "net/base/net_util.h"
#include "net/base/network_change_notifier.h"
#include "net/base/winsock_init.h"
#include "net/base/winsock_util.h"
#include "net/socket/socket_net_log_params.h"
namespace net {
namespace {
const int kTCPKeepAliveSeconds = 45;
bool g_disable_overlapped_reads = false;
bool SetSocketReceiveBufferSize(SOCKET socket, int32 size) {
int rv = setsockopt(socket, SOL_SOCKET, SO_RCVBUF,
reinterpret_cast<const char*>(&size), sizeof(size));
DCHECK(!rv) << "Could not set socket receive buffer size: " << GetLastError();
return rv == 0;
}
bool SetSocketSendBufferSize(SOCKET socket, int32 size) {
int rv = setsockopt(socket, SOL_SOCKET, SO_SNDBUF,
reinterpret_cast<const char*>(&size), sizeof(size));
DCHECK(!rv) << "Could not set socket send buffer size: " << GetLastError();
return rv == 0;
}
// Disable Nagle.
// The Nagle implementation on windows is governed by RFC 896. The idea
// behind Nagle is to reduce small packets on the network. When Nagle is
// enabled, if a partial packet has been sent, the TCP stack will disallow
// further *partial* packets until an ACK has been received from the other
// side. Good applications should always strive to send as much data as
// possible and avoid partial-packet sends. However, in most real world
// applications, there are edge cases where this does not happen, and two
// partial packets may be sent back to back. For a browser, it is NEVER
// a benefit to delay for an RTT before the second packet is sent.
//
// As a practical example in Chromium today, consider the case of a small
// POST. I have verified this:
// Client writes 649 bytes of header (partial packet #1)
// Client writes 50 bytes of POST data (partial packet #2)
// In the above example, with Nagle, a RTT delay is inserted between these
// two sends due to nagle. RTTs can easily be 100ms or more. The best
// fix is to make sure that for POSTing data, we write as much data as
// possible and minimize partial packets. We will fix that. But disabling
// Nagle also ensure we don't run into this delay in other edge cases.
// See also:
// http://technet.microsoft.com/en-us/library/bb726981.aspx
bool DisableNagle(SOCKET socket, bool disable) {
BOOL val = disable ? TRUE : FALSE;
int rv = setsockopt(socket, IPPROTO_TCP, TCP_NODELAY,
reinterpret_cast<const char*>(&val),
sizeof(val));
DCHECK(!rv) << "Could not disable nagle";
return rv == 0;
}
// Enable TCP Keep-Alive to prevent NAT routers from timing out TCP
// connections. See http://crbug.com/27400 for details.
bool SetTCPKeepAlive(SOCKET socket, BOOL enable, int delay_secs) {
int delay = delay_secs * 1000;
struct tcp_keepalive keepalive_vals = {
enable ? 1 : 0, // TCP keep-alive on.
delay, // Delay seconds before sending first TCP keep-alive packet.
delay, // Delay seconds between sending TCP keep-alive packets.
};
DWORD bytes_returned = 0xABAB;
int rv = WSAIoctl(socket, SIO_KEEPALIVE_VALS, &keepalive_vals,
sizeof(keepalive_vals), NULL, 0,
&bytes_returned, NULL, NULL);
DCHECK(!rv) << "Could not enable TCP Keep-Alive for socket: " << socket
<< " [error: " << WSAGetLastError() << "].";
// Disregard any failure in disabling nagle or enabling TCP Keep-Alive.
return rv == 0;
}
// Sets socket parameters. Returns the OS error code (or 0 on
// success).
int SetupSocket(SOCKET socket) {
// Increase the socket buffer sizes from the default sizes for WinXP. In
// performance testing, there is substantial benefit by increasing from 8KB
// to 64KB.
// See also:
// http://support.microsoft.com/kb/823764/EN-US
// On Vista, if we manually set these sizes, Vista turns off its receive
// window auto-tuning feature.
// http://blogs.msdn.com/wndp/archive/2006/05/05/Winhec-blog-tcpip-2.aspx
// Since Vista's auto-tune is better than any static value we can could set,
// only change these on pre-vista machines.
if (base::win::GetVersion() < base::win::VERSION_VISTA) {
const int32 kSocketBufferSize = 64 * 1024;
SetSocketReceiveBufferSize(socket, kSocketBufferSize);
SetSocketSendBufferSize(socket, kSocketBufferSize);
}
DisableNagle(socket, true);
SetTCPKeepAlive(socket, true, kTCPKeepAliveSeconds);
return 0;
}
// Creates a new socket and sets default parameters for it. Returns
// the OS error code (or 0 on success).
int CreateSocket(int family, SOCKET* socket) {
*socket = WSASocket(family, SOCK_STREAM, IPPROTO_TCP, NULL, 0,
WSA_FLAG_OVERLAPPED);
if (*socket == INVALID_SOCKET) {
int os_error = WSAGetLastError();
LOG(ERROR) << "WSASocket failed: " << os_error;
return os_error;
}
int error = SetupSocket(*socket);
if (error) {
if (closesocket(*socket) < 0)
PLOG(ERROR) << "closesocket";
*socket = INVALID_SOCKET;
return error;
}
return 0;
}
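// Maps a Winsock connect() error onto the portable net error space, with
// more specific codes for firewall blocks, timeouts, and offline machines.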
int MapConnectError(int os_error) {
switch (os_error) {
// connect fails with WSAEACCES when Windows Firewall blocks the
// connection.
case WSAEACCES:
return ERR_NETWORK_ACCESS_DENIED;
case WSAETIMEDOUT:
return ERR_CONNECTION_TIMED_OUT;
default: {
int net_error = MapSystemError(os_error);
if (net_error == ERR_FAILED)
return ERR_CONNECTION_FAILED; // More specific than ERR_FAILED.
// Give a more specific error when the user is offline.
if (net_error == ERR_ADDRESS_UNREACHABLE &&
NetworkChangeNotifier::IsOffline()) {
return ERR_INTERNET_DISCONNECTED;
}
return net_error;
}
}
}
} // namespace
//-----------------------------------------------------------------------------
// This class encapsulates all the state that has to be preserved as long as
// there is a network IO operation in progress. If the owner TCPClientSocketWin
// is destroyed while an operation is in progress, the Core is detached and
// lives on until the operation completes, so the OS never ends up
// referencing resources owned by an already-destroyed object.
class TCPClientSocketWin::Core : public base::RefCounted<Core> {
public:
explicit Core(TCPClientSocketWin* socket);
// Start watching for the end of a read or write operation.
void WatchForRead();
void WatchForWrite();
// The TCPClientSocketWin is going away.
void Detach() { socket_ = NULL; }
// Throttle the read size based on our current slow start state.
// Returns the throttled read size.
int ThrottleReadSize(int size) {
if (slow_start_throttle_ < kMaxSlowStartThrottle) {
size = std::min(size, slow_start_throttle_);
slow_start_throttle_ *= 2;
}
return size;
}
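  // With the constants below, successive reads are capped at 1 KB, 2 KB,
  // 4 KB, ... until the 32 KB ceiling is reached, mirroring the way TCP's
  // congestion window opens up during slow start.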
// The separate OVERLAPPED variables for asynchronous operation.
// |read_overlapped_| is used for both Connect() and Read().
  // |write_overlapped_| is only used for Write().
OVERLAPPED read_overlapped_;
OVERLAPPED write_overlapped_;
// The buffers used in Read() and Write().
scoped_refptr<IOBuffer> read_iobuffer_;
scoped_refptr<IOBuffer> write_iobuffer_;
int read_buffer_length_;
int write_buffer_length_;
  // Remember the state of g_disable_overlapped_reads at the time the socket
  // was created; that setting applies for the socket's whole lifetime.
bool disable_overlapped_reads_;
bool non_blocking_reads_initialized_;
<|fim▁hole|> private:
friend class base::RefCounted<Core>;
class ReadDelegate : public base::win::ObjectWatcher::Delegate {
public:
explicit ReadDelegate(Core* core) : core_(core) {}
virtual ~ReadDelegate() {}
// base::ObjectWatcher::Delegate methods:
virtual void OnObjectSignaled(HANDLE object);
private:
Core* const core_;
};
class WriteDelegate : public base::win::ObjectWatcher::Delegate {
public:
explicit WriteDelegate(Core* core) : core_(core) {}
virtual ~WriteDelegate() {}
// base::ObjectWatcher::Delegate methods:
virtual void OnObjectSignaled(HANDLE object);
private:
Core* const core_;
};
~Core();
// The socket that created this object.
TCPClientSocketWin* socket_;
// |reader_| handles the signals from |read_watcher_|.
ReadDelegate reader_;
// |writer_| handles the signals from |write_watcher_|.
WriteDelegate writer_;
// |read_watcher_| watches for events from Connect() and Read().
base::win::ObjectWatcher read_watcher_;
  // |write_watcher_| watches for events from Write().
base::win::ObjectWatcher write_watcher_;
// When doing reads from the socket, we try to mirror TCP's slow start.
  // We do this because otherwise the async IO subsystem artificially delays
// returning data to the application.
static const int kInitialSlowStartThrottle = 1 * 1024;
static const int kMaxSlowStartThrottle = 32 * kInitialSlowStartThrottle;
int slow_start_throttle_;
DISALLOW_COPY_AND_ASSIGN(Core);
};
TCPClientSocketWin::Core::Core(
TCPClientSocketWin* socket)
: read_buffer_length_(0),
write_buffer_length_(0),
disable_overlapped_reads_(g_disable_overlapped_reads),
non_blocking_reads_initialized_(false),
socket_(socket),
ALLOW_THIS_IN_INITIALIZER_LIST(reader_(this)),
ALLOW_THIS_IN_INITIALIZER_LIST(writer_(this)),
slow_start_throttle_(kInitialSlowStartThrottle) {
memset(&read_overlapped_, 0, sizeof(read_overlapped_));
memset(&write_overlapped_, 0, sizeof(write_overlapped_));
read_overlapped_.hEvent = WSACreateEvent();
write_overlapped_.hEvent = WSACreateEvent();
}
TCPClientSocketWin::Core::~Core() {
// Make sure the message loop is not watching this object anymore.
read_watcher_.StopWatching();
write_watcher_.StopWatching();
WSACloseEvent(read_overlapped_.hEvent);
memset(&read_overlapped_, 0xaf, sizeof(read_overlapped_));
WSACloseEvent(write_overlapped_.hEvent);
memset(&write_overlapped_, 0xaf, sizeof(write_overlapped_));
}
void TCPClientSocketWin::Core::WatchForRead() {
// We grab an extra reference because there is an IO operation in progress.
// Balanced in ReadDelegate::OnObjectSignaled().
AddRef();
read_watcher_.StartWatching(read_overlapped_.hEvent, &reader_);
}
void TCPClientSocketWin::Core::WatchForWrite() {
// We grab an extra reference because there is an IO operation in progress.
// Balanced in WriteDelegate::OnObjectSignaled().
AddRef();
write_watcher_.StartWatching(write_overlapped_.hEvent, &writer_);
}
void TCPClientSocketWin::Core::ReadDelegate::OnObjectSignaled(
HANDLE object) {
DCHECK_EQ(object, core_->read_overlapped_.hEvent);
if (core_->socket_) {
if (core_->socket_->waiting_connect()) {
core_->socket_->DidCompleteConnect();
} else if (core_->disable_overlapped_reads_) {
core_->socket_->DidSignalRead();
} else {
core_->socket_->DidCompleteRead();
}
}
core_->Release();
}
void TCPClientSocketWin::Core::WriteDelegate::OnObjectSignaled(
HANDLE object) {
DCHECK_EQ(object, core_->write_overlapped_.hEvent);
if (core_->socket_)
core_->socket_->DidCompleteWrite();
core_->Release();
}
//-----------------------------------------------------------------------------
TCPClientSocketWin::TCPClientSocketWin(const AddressList& addresses,
net::NetLog* net_log,
const net::NetLog::Source& source)
: socket_(INVALID_SOCKET),
bound_socket_(INVALID_SOCKET),
addresses_(addresses),
current_address_index_(-1),
waiting_read_(false),
waiting_write_(false),
next_connect_state_(CONNECT_STATE_NONE),
connect_os_error_(0),
net_log_(BoundNetLog::Make(net_log, NetLog::SOURCE_SOCKET)),
previously_disconnected_(false) {
net_log_.BeginEvent(NetLog::TYPE_SOCKET_ALIVE,
source.ToEventParametersCallback());
EnsureWinsockInit();
}
TCPClientSocketWin::~TCPClientSocketWin() {
Disconnect();
net_log_.EndEvent(NetLog::TYPE_SOCKET_ALIVE);
}
int TCPClientSocketWin::AdoptSocket(SOCKET socket) {
DCHECK_EQ(socket_, INVALID_SOCKET);
int error = SetupSocket(socket);
if (error)
return MapSystemError(error);
socket_ = socket;
SetNonBlocking(socket_);
core_ = new Core(this);
current_address_index_ = 0;
use_history_.set_was_ever_connected();
return OK;
}
int TCPClientSocketWin::Bind(const IPEndPoint& address) {
if (current_address_index_ >= 0 || bind_address_.get()) {
// Cannot bind the socket if we are already connected or connecting.
return ERR_UNEXPECTED;
}
SockaddrStorage storage;
if (!address.ToSockAddr(storage.addr, &storage.addr_len))
return ERR_INVALID_ARGUMENT;
// Create |bound_socket_| and try to bind it to |address|.
int error = CreateSocket(address.GetSockAddrFamily(), &bound_socket_);
if (error)
return MapSystemError(error);
if (bind(bound_socket_, storage.addr, storage.addr_len)) {
error = errno;
if (closesocket(bound_socket_) < 0)
PLOG(ERROR) << "closesocket";
bound_socket_ = INVALID_SOCKET;
return MapSystemError(error);
}
bind_address_.reset(new IPEndPoint(address));
return 0;
}
int TCPClientSocketWin::Connect(const CompletionCallback& callback) {
DCHECK(CalledOnValidThread());
// If already connected, then just return OK.
if (socket_ != INVALID_SOCKET)
return OK;
base::StatsCounter connects("tcp.connect");
connects.Increment();
net_log_.BeginEvent(NetLog::TYPE_TCP_CONNECT,
addresses_.CreateNetLogCallback());
// We will try to connect to each address in addresses_. Start with the
// first one in the list.
next_connect_state_ = CONNECT_STATE_CONNECT;
current_address_index_ = 0;
int rv = DoConnectLoop(OK);
if (rv == ERR_IO_PENDING) {
// Synchronous operation not supported.
DCHECK(!callback.is_null());
// TODO(ajwong): Is setting read_callback_ the right thing to do here??
read_callback_ = callback;
} else {
LogConnectCompletion(rv);
}
return rv;
}
int TCPClientSocketWin::DoConnectLoop(int result) {
DCHECK_NE(next_connect_state_, CONNECT_STATE_NONE);
int rv = result;
do {
ConnectState state = next_connect_state_;
next_connect_state_ = CONNECT_STATE_NONE;
switch (state) {
case CONNECT_STATE_CONNECT:
DCHECK_EQ(OK, rv);
rv = DoConnect();
break;
case CONNECT_STATE_CONNECT_COMPLETE:
rv = DoConnectComplete(rv);
break;
default:
LOG(DFATAL) << "bad state " << state;
rv = ERR_UNEXPECTED;
break;
}
} while (rv != ERR_IO_PENDING && next_connect_state_ != CONNECT_STATE_NONE);
return rv;
}
int TCPClientSocketWin::DoConnect() {
DCHECK_GE(current_address_index_, 0);
DCHECK_LT(current_address_index_, static_cast<int>(addresses_.size()));
DCHECK_EQ(0, connect_os_error_);
const IPEndPoint& endpoint = addresses_[current_address_index_];
if (previously_disconnected_) {
use_history_.Reset();
previously_disconnected_ = false;
}
net_log_.BeginEvent(NetLog::TYPE_TCP_CONNECT_ATTEMPT,
CreateNetLogIPEndPointCallback(&endpoint));
next_connect_state_ = CONNECT_STATE_CONNECT_COMPLETE;
if (bound_socket_ != INVALID_SOCKET) {
DCHECK(bind_address_.get());
socket_ = bound_socket_;
bound_socket_ = INVALID_SOCKET;
} else {
connect_os_error_ = CreateSocket(endpoint.GetSockAddrFamily(), &socket_);
if (connect_os_error_ != 0)
return MapSystemError(connect_os_error_);
if (bind_address_.get()) {
SockaddrStorage storage;
if (!bind_address_->ToSockAddr(storage.addr, &storage.addr_len))
return ERR_INVALID_ARGUMENT;
if (bind(socket_, storage.addr, storage.addr_len))
return MapSystemError(errno);
}
}
DCHECK(!core_);
core_ = new Core(this);
// WSAEventSelect sets the socket to non-blocking mode as a side effect.
// Our connect() and recv() calls require that the socket be non-blocking.
WSAEventSelect(socket_, core_->read_overlapped_.hEvent, FD_CONNECT);
SockaddrStorage storage;
if (!endpoint.ToSockAddr(storage.addr, &storage.addr_len))
return ERR_INVALID_ARGUMENT;
if (!connect(socket_, storage.addr, storage.addr_len)) {
// Connected without waiting!
//
// The MSDN page for connect says:
// With a nonblocking socket, the connection attempt cannot be completed
// immediately. In this case, connect will return SOCKET_ERROR, and
// WSAGetLastError will return WSAEWOULDBLOCK.
// which implies that for a nonblocking socket, connect never returns 0.
// It's not documented whether the event object will be signaled or not
// if connect does return 0. So the code below is essentially dead code
// and we don't know if it's correct.
NOTREACHED();
if (ResetEventIfSignaled(core_->read_overlapped_.hEvent))
return OK;
} else {
int os_error = WSAGetLastError();
if (os_error != WSAEWOULDBLOCK) {
LOG(ERROR) << "connect failed: " << os_error;
connect_os_error_ = os_error;
return MapConnectError(os_error);
}
}
core_->WatchForRead();
return ERR_IO_PENDING;
}
int TCPClientSocketWin::DoConnectComplete(int result) {
// Log the end of this attempt (and any OS error it threw).
int os_error = connect_os_error_;
connect_os_error_ = 0;
if (result != OK) {
net_log_.EndEvent(NetLog::TYPE_TCP_CONNECT_ATTEMPT,
NetLog::IntegerCallback("os_error", os_error));
} else {
net_log_.EndEvent(NetLog::TYPE_TCP_CONNECT_ATTEMPT);
}
if (result == OK) {
use_history_.set_was_ever_connected();
return OK; // Done!
}
// Close whatever partially connected socket we currently have.
DoDisconnect();
// Try to fall back to the next address in the list.
if (current_address_index_ + 1 < static_cast<int>(addresses_.size())) {
next_connect_state_ = CONNECT_STATE_CONNECT;
++current_address_index_;
return OK;
}
// Otherwise there is nothing to fall back to, so give up.
return result;
}
void TCPClientSocketWin::Disconnect() {
DCHECK(CalledOnValidThread());
DoDisconnect();
current_address_index_ = -1;
bind_address_.reset();
}
void TCPClientSocketWin::DoDisconnect() {
DCHECK(CalledOnValidThread());
if (socket_ == INVALID_SOCKET)
return;
// Note: don't use CancelIo to cancel pending IO because it doesn't work
// when there is a Winsock layered service provider.
// In most socket implementations, closing a socket results in a graceful
// connection shutdown, but in Winsock we have to call shutdown explicitly.
// See the MSDN page "Graceful Shutdown, Linger Options, and Socket Closure"
// at http://msdn.microsoft.com/en-us/library/ms738547.aspx
shutdown(socket_, SD_SEND);
// This cancels any pending IO.
closesocket(socket_);
socket_ = INVALID_SOCKET;
if (waiting_connect()) {
// We closed the socket, so this notification will never come.
    // From MSDN's WSAEventSelect documentation:
// "Closing a socket with closesocket also cancels the association and
// selection of network events specified in WSAEventSelect for the socket".
core_->Release();
}
waiting_read_ = false;
waiting_write_ = false;
core_->Detach();
core_ = NULL;
previously_disconnected_ = true;
}
bool TCPClientSocketWin::IsConnected() const {
DCHECK(CalledOnValidThread());
if (socket_ == INVALID_SOCKET || waiting_connect())
return false;
if (waiting_read_)
return true;
// Check if connection is alive.
char c;
int rv = recv(socket_, &c, 1, MSG_PEEK);
if (rv == 0)
return false;
if (rv == SOCKET_ERROR && WSAGetLastError() != WSAEWOULDBLOCK)
return false;
return true;
}
bool TCPClientSocketWin::IsConnectedAndIdle() const {
DCHECK(CalledOnValidThread());
if (socket_ == INVALID_SOCKET || waiting_connect())
return false;
if (waiting_read_)
return true;
// Check if connection is alive and we haven't received any data
// unexpectedly.
char c;
int rv = recv(socket_, &c, 1, MSG_PEEK);
if (rv >= 0)
return false;
if (WSAGetLastError() != WSAEWOULDBLOCK)
return false;
return true;
}
int TCPClientSocketWin::GetPeerAddress(IPEndPoint* address) const {
DCHECK(CalledOnValidThread());
DCHECK(address);
if (!IsConnected())
return ERR_SOCKET_NOT_CONNECTED;
*address = addresses_[current_address_index_];
return OK;
}
int TCPClientSocketWin::GetLocalAddress(IPEndPoint* address) const {
DCHECK(CalledOnValidThread());
DCHECK(address);
if (socket_ == INVALID_SOCKET) {
if (bind_address_.get()) {
*address = *bind_address_;
return OK;
}
return ERR_SOCKET_NOT_CONNECTED;
}
struct sockaddr_storage addr_storage;
socklen_t addr_len = sizeof(addr_storage);
struct sockaddr* addr = reinterpret_cast<struct sockaddr*>(&addr_storage);
if (getsockname(socket_, addr, &addr_len))
return MapSystemError(WSAGetLastError());
if (!address->FromSockAddr(addr, addr_len))
return ERR_FAILED;
return OK;
}
void TCPClientSocketWin::SetSubresourceSpeculation() {
use_history_.set_subresource_speculation();
}
void TCPClientSocketWin::SetOmniboxSpeculation() {
use_history_.set_omnibox_speculation();
}
bool TCPClientSocketWin::WasEverUsed() const {
return use_history_.was_used_to_convey_data();
}
bool TCPClientSocketWin::UsingTCPFastOpen() const {
// Not supported on windows.
return false;
}
bool TCPClientSocketWin::WasNpnNegotiated() const {
return false;
}
NextProto TCPClientSocketWin::GetNegotiatedProtocol() const {
return kProtoUnknown;
}
bool TCPClientSocketWin::GetSSLInfo(SSLInfo* ssl_info) {
return false;
}
int TCPClientSocketWin::Read(IOBuffer* buf,
int buf_len,
const CompletionCallback& callback) {
DCHECK(CalledOnValidThread());
DCHECK_NE(socket_, INVALID_SOCKET);
DCHECK(!waiting_read_);
DCHECK(read_callback_.is_null());
DCHECK(!core_->read_iobuffer_);
return DoRead(buf, buf_len, callback);
}
int TCPClientSocketWin::Write(IOBuffer* buf,
int buf_len,
const CompletionCallback& callback) {
DCHECK(CalledOnValidThread());
DCHECK_NE(socket_, INVALID_SOCKET);
DCHECK(!waiting_write_);
DCHECK(write_callback_.is_null());
DCHECK_GT(buf_len, 0);
DCHECK(!core_->write_iobuffer_);
base::StatsCounter writes("tcp.writes");
writes.Increment();
WSABUF write_buffer;
write_buffer.len = buf_len;
write_buffer.buf = buf->data();
// TODO(wtc): Remove the assertion after enough testing.
AssertEventNotSignaled(core_->write_overlapped_.hEvent);
DWORD num;
int rv = WSASend(socket_, &write_buffer, 1, &num, 0,
&core_->write_overlapped_, NULL);
if (rv == 0) {
if (ResetEventIfSignaled(core_->write_overlapped_.hEvent)) {
rv = static_cast<int>(num);
if (rv > buf_len || rv < 0) {
// It seems that some winsock interceptors report that more was written
// than was available. Treat this as an error. http://crbug.com/27870
LOG(ERROR) << "Detected broken LSP: Asked to write " << buf_len
<< " bytes, but " << rv << " bytes reported.";
return ERR_WINSOCK_UNEXPECTED_WRITTEN_BYTES;
}
base::StatsCounter write_bytes("tcp.write_bytes");
write_bytes.Add(rv);
if (rv > 0)
use_history_.set_was_used_to_convey_data();
net_log_.AddByteTransferEvent(NetLog::TYPE_SOCKET_BYTES_SENT, rv,
buf->data());
return rv;
}
} else {
int os_error = WSAGetLastError();
if (os_error != WSA_IO_PENDING) {
int net_error = MapSystemError(os_error);
net_log_.AddEvent(NetLog::TYPE_SOCKET_WRITE_ERROR,
CreateNetLogSocketErrorCallback(net_error, os_error));
return net_error;
}
}
waiting_write_ = true;
write_callback_ = callback;
core_->write_iobuffer_ = buf;
core_->write_buffer_length_ = buf_len;
core_->WatchForWrite();
return ERR_IO_PENDING;
}
bool TCPClientSocketWin::SetReceiveBufferSize(int32 size) {
DCHECK(CalledOnValidThread());
return SetSocketReceiveBufferSize(socket_, size);
}
bool TCPClientSocketWin::SetSendBufferSize(int32 size) {
DCHECK(CalledOnValidThread());
return SetSocketSendBufferSize(socket_, size);
}
bool TCPClientSocketWin::SetKeepAlive(bool enable, int delay) {
return SetTCPKeepAlive(socket_, enable, delay);
}
bool TCPClientSocketWin::SetNoDelay(bool no_delay) {
return DisableNagle(socket_, no_delay);
}
void TCPClientSocketWin::DisableOverlappedReads() {
g_disable_overlapped_reads = true;
}
void TCPClientSocketWin::LogConnectCompletion(int net_error) {
if (net_error == OK)
UpdateConnectionTypeHistograms(CONNECTION_ANY);
if (net_error != OK) {
net_log_.EndEventWithNetErrorCode(NetLog::TYPE_TCP_CONNECT, net_error);
return;
}
struct sockaddr_storage source_address;
socklen_t addrlen = sizeof(source_address);
int rv = getsockname(
socket_, reinterpret_cast<struct sockaddr*>(&source_address), &addrlen);
if (rv != 0) {
LOG(ERROR) << "getsockname() [rv: " << rv
<< "] error: " << WSAGetLastError();
NOTREACHED();
net_log_.EndEventWithNetErrorCode(NetLog::TYPE_TCP_CONNECT, rv);
return;
}
net_log_.EndEvent(
NetLog::TYPE_TCP_CONNECT,
CreateNetLogSourceAddressCallback(
reinterpret_cast<const struct sockaddr*>(&source_address),
sizeof(source_address)));
}
int TCPClientSocketWin::DoRead(IOBuffer* buf, int buf_len,
const CompletionCallback& callback) {
if (core_->disable_overlapped_reads_) {
if (!core_->non_blocking_reads_initialized_) {
WSAEventSelect(socket_, core_->read_overlapped_.hEvent,
FD_READ | FD_CLOSE);
core_->non_blocking_reads_initialized_ = true;
}
int rv = recv(socket_, buf->data(), buf_len, 0);
if (rv == SOCKET_ERROR) {
int os_error = WSAGetLastError();
if (os_error != WSAEWOULDBLOCK) {
int net_error = MapSystemError(os_error);
net_log_.AddEvent(NetLog::TYPE_SOCKET_READ_ERROR,
CreateNetLogSocketErrorCallback(net_error, os_error));
return net_error;
}
} else {
base::StatsCounter read_bytes("tcp.read_bytes");
if (rv > 0) {
use_history_.set_was_used_to_convey_data();
read_bytes.Add(rv);
}
net_log_.AddByteTransferEvent(NetLog::TYPE_SOCKET_BYTES_RECEIVED, rv,
buf->data());
return rv;
}
} else {
buf_len = core_->ThrottleReadSize(buf_len);
WSABUF read_buffer;
read_buffer.len = buf_len;
read_buffer.buf = buf->data();
// TODO(wtc): Remove the assertion after enough testing.
AssertEventNotSignaled(core_->read_overlapped_.hEvent);
DWORD num;
DWORD flags = 0;
int rv = WSARecv(socket_, &read_buffer, 1, &num, &flags,
&core_->read_overlapped_, NULL);
if (rv == 0) {
if (ResetEventIfSignaled(core_->read_overlapped_.hEvent)) {
base::StatsCounter read_bytes("tcp.read_bytes");
if (num > 0) {
use_history_.set_was_used_to_convey_data();
read_bytes.Add(num);
}
net_log_.AddByteTransferEvent(NetLog::TYPE_SOCKET_BYTES_RECEIVED, num,
buf->data());
return static_cast<int>(num);
}
} else {
int os_error = WSAGetLastError();
if (os_error != WSA_IO_PENDING) {
int net_error = MapSystemError(os_error);
net_log_.AddEvent(NetLog::TYPE_SOCKET_READ_ERROR,
CreateNetLogSocketErrorCallback(net_error, os_error));
return net_error;
}
}
}
waiting_read_ = true;
read_callback_ = callback;
core_->read_iobuffer_ = buf;
core_->read_buffer_length_ = buf_len;
core_->WatchForRead();
return ERR_IO_PENDING;
}
void TCPClientSocketWin::DoReadCallback(int rv) {
DCHECK_NE(rv, ERR_IO_PENDING);
DCHECK(!read_callback_.is_null());
// Since Run may result in Read being called, clear read_callback_ up front.
CompletionCallback c = read_callback_;
read_callback_.Reset();
c.Run(rv);
}
void TCPClientSocketWin::DoWriteCallback(int rv) {
DCHECK_NE(rv, ERR_IO_PENDING);
DCHECK(!write_callback_.is_null());
  // Since Run may result in Write being called, clear write_callback_ up front.
CompletionCallback c = write_callback_;
write_callback_.Reset();
c.Run(rv);
}
void TCPClientSocketWin::DidCompleteConnect() {
DCHECK_EQ(next_connect_state_, CONNECT_STATE_CONNECT_COMPLETE);
int result;
WSANETWORKEVENTS events;
int rv = WSAEnumNetworkEvents(socket_, core_->read_overlapped_.hEvent,
&events);
int os_error = 0;
if (rv == SOCKET_ERROR) {
NOTREACHED();
os_error = WSAGetLastError();
result = MapSystemError(os_error);
} else if (events.lNetworkEvents & FD_CONNECT) {
os_error = events.iErrorCode[FD_CONNECT_BIT];
result = MapConnectError(os_error);
} else {
NOTREACHED();
result = ERR_UNEXPECTED;
}
connect_os_error_ = os_error;
rv = DoConnectLoop(result);
if (rv != ERR_IO_PENDING) {
LogConnectCompletion(rv);
DoReadCallback(rv);
}
}
void TCPClientSocketWin::DidCompleteRead() {
DCHECK(waiting_read_);
DWORD num_bytes, flags;
BOOL ok = WSAGetOverlappedResult(socket_, &core_->read_overlapped_,
&num_bytes, FALSE, &flags);
waiting_read_ = false;
int rv;
if (ok) {
base::StatsCounter read_bytes("tcp.read_bytes");
read_bytes.Add(num_bytes);
if (num_bytes > 0)
use_history_.set_was_used_to_convey_data();
net_log_.AddByteTransferEvent(NetLog::TYPE_SOCKET_BYTES_RECEIVED,
num_bytes, core_->read_iobuffer_->data());
rv = static_cast<int>(num_bytes);
} else {
int os_error = WSAGetLastError();
rv = MapSystemError(os_error);
net_log_.AddEvent(NetLog::TYPE_SOCKET_READ_ERROR,
CreateNetLogSocketErrorCallback(rv, os_error));
}
WSAResetEvent(core_->read_overlapped_.hEvent);
core_->read_iobuffer_ = NULL;
core_->read_buffer_length_ = 0;
DoReadCallback(rv);
}
void TCPClientSocketWin::DidCompleteWrite() {
DCHECK(waiting_write_);
DWORD num_bytes, flags;
BOOL ok = WSAGetOverlappedResult(socket_, &core_->write_overlapped_,
&num_bytes, FALSE, &flags);
WSAResetEvent(core_->write_overlapped_.hEvent);
waiting_write_ = false;
int rv;
if (!ok) {
int os_error = WSAGetLastError();
rv = MapSystemError(os_error);
net_log_.AddEvent(NetLog::TYPE_SOCKET_WRITE_ERROR,
CreateNetLogSocketErrorCallback(rv, os_error));
} else {
rv = static_cast<int>(num_bytes);
if (rv > core_->write_buffer_length_ || rv < 0) {
// It seems that some winsock interceptors report that more was written
// than was available. Treat this as an error. http://crbug.com/27870
LOG(ERROR) << "Detected broken LSP: Asked to write "
<< core_->write_buffer_length_ << " bytes, but " << rv
<< " bytes reported.";
rv = ERR_WINSOCK_UNEXPECTED_WRITTEN_BYTES;
} else {
base::StatsCounter write_bytes("tcp.write_bytes");
write_bytes.Add(num_bytes);
if (num_bytes > 0)
use_history_.set_was_used_to_convey_data();
net_log_.AddByteTransferEvent(NetLog::TYPE_SOCKET_BYTES_SENT, num_bytes,
core_->write_iobuffer_->data());
}
}
core_->write_iobuffer_ = NULL;
DoWriteCallback(rv);
}
void TCPClientSocketWin::DidSignalRead() {
DCHECK(waiting_read_);
int os_error = 0;
WSANETWORKEVENTS network_events;
int rv = WSAEnumNetworkEvents(socket_, core_->read_overlapped_.hEvent,
&network_events);
if (rv == SOCKET_ERROR) {
os_error = WSAGetLastError();
rv = MapSystemError(os_error);
} else if (network_events.lNetworkEvents) {
DCHECK_EQ(network_events.lNetworkEvents & ~(FD_READ | FD_CLOSE), 0);
// If network_events.lNetworkEvents is FD_CLOSE and
// network_events.iErrorCode[FD_CLOSE_BIT] is 0, it is a graceful
// connection closure. It is tempting to directly set rv to 0 in
// this case, but the MSDN pages for WSAEventSelect and
// WSAAsyncSelect recommend we still call DoRead():
// FD_CLOSE should only be posted after all data is read from a
// socket, but an application should check for remaining data upon
// receipt of FD_CLOSE to avoid any possibility of losing data.
//
// If network_events.iErrorCode[FD_READ_BIT] or
// network_events.iErrorCode[FD_CLOSE_BIT] is nonzero, still call
// DoRead() because recv() reports a more accurate error code
// (WSAECONNRESET vs. WSAECONNABORTED) when the connection was
// reset.
rv = DoRead(core_->read_iobuffer_, core_->read_buffer_length_,
read_callback_);
if (rv == ERR_IO_PENDING)
return;
} else {
// This may happen because Read() may succeed synchronously and
// consume all the received data without resetting the event object.
core_->WatchForRead();
return;
}
waiting_read_ = false;
core_->read_iobuffer_ = NULL;
core_->read_buffer_length_ = 0;
DoReadCallback(rv);
}
} // namespace net<|fim▁end|> | |
<|file_name|>DefaultDirectedAcyclicGraph.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.graph;
import com.google.common.base.Preconditions;
public class DefaultDirectedAcyclicGraph<T> extends DefaultTraversableGraph<T>
implements DirectedAcyclicGraph<T> {
public DefaultDirectedAcyclicGraph(MutableDirectedGraph<T> graph) {
super(graph);
Preconditions.checkArgument(super.isAcyclic());
}
}<|fim▁end|> | * Unless required by applicable law or agreed to in writing, software |
<|file_name|>path_parsing.rs<|end_file_name|><|fim▁begin|>extern crate memchr;
use self::memchr::{memchr, memrchr};
use memrnchr::memrnchr;
use std::path::MAIN_SEPARATOR;
use std::str;<|fim▁hole|>pub const SEP: u8 = MAIN_SEPARATOR as u8;
lazy_static! {
pub static ref SEP_STR: &'static str = str::from_utf8(&[SEP]).unwrap();
}
// Returns the byte offset of the last byte that equals MAIN_SEPARATOR.
#[inline(always)]
pub fn find_last_sep_pos(bytes: &[u8]) -> Option<usize> {
memrchr(SEP, bytes)
}
// Returns the byte offset of the last byte that is not MAIN_SEPARATOR.
#[inline(always)]
pub fn find_last_non_sep_pos(bytes: &[u8]) -> Option<usize> {
memrnchr(SEP, bytes)
}
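// For example, on a Unix-like target where MAIN_SEPARATOR is '/':
// find_last_sep_pos(b"a/b/c") == Some(3), and
// find_last_non_sep_pos(b"a/b/c/") == Some(4).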
// Whether the given byte sequence contains a MAIN_SEPARATOR.
#[inline(always)]
pub fn contains_sep(bytes: &[u8]) -> bool {
memchr(SEP, bytes) != None
}<|fim▁end|> | |
<|file_name|>RpcClients4TaskerModule.java<|end_file_name|><|fim▁begin|>package dataengine.tasker;
import javax.jms.Connection;
import com.google.inject.Provides;
import dataengine.apis.DepJobService_I;
import dataengine.apis.RpcClientProvider;
import dataengine.apis.SessionsDB_I;
import dataengine.apis.CommunicationConsts;
import lombok.extern.slf4j.Slf4j;
import net.deelam.activemq.rpc.RpcClientsModule;
/// provides verticle clients used by Tasker service
@Slf4j
class RpcClients4TaskerModule extends RpcClientsModule {
// private final String depJobMgrBroadcastAddr;
public RpcClients4TaskerModule(Connection connection/*, String depJobMgrBroadcastAddr*/) {
super(connection);<|fim▁hole|> log.debug("VertxRpcClients4TaskerModule configured");
}
// @Provides
// RpcClientProvider<DepJobService_I> jobDispatcherRpcClient(){
// return new RpcClientProvider<>(getAmqClientSupplierFor(DepJobService_I.class, depJobMgrBroadcastAddr));
// }
@Provides
RpcClientProvider<SessionsDB_I> sessionsDbRpcClient(){
return new RpcClientProvider<>(getAmqClientSupplierFor(SessionsDB_I.class, CommunicationConsts.SESSIONDB_RPCADDR));
}
}<|fim▁end|> | // this.depJobMgrBroadcastAddr=depJobMgrBroadcastAddr;
//debug = true; |
<|file_name|>ExportLogsStatsOverHttpDUnitTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.geode.test.junit.rules.GfshShellConnectionRule;
import org.apache.geode.test.junit.categories.DistributedTest;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
@Category(DistributedTest.class)
public class ExportLogsStatsOverHttpDUnitTest extends ExportLogsStatsDUnitTest {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
<|fim▁hole|> connector.connect(httpPort, GfshShellConnectionRule.PortType.http);
}
@Test
public void testExportWithDir() throws Exception {
connectIfNeeded();
File dir = temporaryFolder.newFolder();
// export the logs
connector.executeCommand("export logs --dir=" + dir.getAbsolutePath());
// verify that the message contains a path to the user.dir
String message = connector.getGfshOutput();
assertThat(message).contains("Logs exported to: ");
assertThat(message).contains(dir.getAbsolutePath());
String zipPath = getZipPathFromCommandResult(message);
Set<String> actualZipEntries =
new ZipFile(zipPath).stream().map(ZipEntry::getName).collect(Collectors.toSet());
assertThat(actualZipEntries).isEqualTo(expectedZipEntries);
// also verify that the zip file on locator is deleted
assertThat(Arrays.stream(locator.getWorkingDir().listFiles())
.filter(file -> file.getName().endsWith(".zip")).collect(Collectors.toSet())).isEmpty();
}
protected String getZipPathFromCommandResult(String message) {
return message.replaceAll("Logs exported to: ", "").trim();
}
}<|fim▁end|> | @Override
public void connectIfNeeded() throws Exception {
if (!connector.isConnected()) |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim:set et ts=4 sw=4:
"""Utility functions
@contact: Debian FTP Master <[email protected]>
@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <[email protected]>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import commands
import email.Header
import os
import pwd
import select
import socket
import shutil
import sys
import tempfile
import traceback
import stat
import apt_inst
import apt_pkg
import time
import re
import email as modemail
import subprocess
from dbconn import DBConn, get_architecture, get_component, get_suite, \
get_override_type, Keyring, session_wrapper, \
get_active_keyring_paths, get_primary_keyring_path
from sqlalchemy import desc
from dak_exceptions import *
from gpg import SignedFile
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
re_is_orig_source
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
from collections import defaultdict
################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = pipe.stdout.read()
pipe.wait()
if output[-1:] == '\n':
output = output[:-1]
ret = pipe.wait()
if ret is None:
ret = 0
return ret, output
commands.getstatusoutput = dak_getstatusoutput
################################################################################
def html_escape(s):
""" Escape html chars """
return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
################################################################################
def open_file(filename, mode='r'):
"""
Open C{file}, return fileobject.
@type filename: string
@param filename: path/filename to open
@type mode: string
@param mode: open mode
@rtype: fileobject
@return: open fileobject
@raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
"""
try:
f = open(filename, mode)
except IOError:
raise CantOpenError(filename)
return f
################################################################################
def our_raw_input(prompt=""):
if prompt:
while 1:
try:
sys.stdout.write(prompt)
break
except IOError:
pass
sys.stdout.flush()
try:
ret = raw_input()
return ret
except EOFError:
sys.stderr.write("\nUser interrupt (^D).\n")
raise SystemExit
################################################################################
def extract_component_from_section(section, session=None):
component = ""
if section.find('/') != -1:
component = section.split('/')[0]
# Expand default component
if component == "":
comp = get_component(section, session)
if comp is None:
component = "main"
else:
component = comp.component_name
return (section, component)
################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
require_signature = True
if keyrings == None:
keyrings = []
require_signature = False
signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
contents = signed_file.contents
error = ""
changes = {}
# Split the lines in the input, keeping the linebreaks.
lines = contents.splitlines(True)
if len(lines) == 0:
raise ParseChangesError("[Empty changes file]")
# Reindex by line number so we can easily verify the format of
# .dsc files...
index = 0
indexed_lines = {}
for line in lines:
index += 1
indexed_lines[index] = line[:-1]
num_of_lines = len(indexed_lines.keys())
index = 0
first = -1
while index < num_of_lines:
index += 1
line = indexed_lines[index]
if line == "" and signing_rules == 1:
if index != num_of_lines:
raise InvalidDscError(index)
break
slf = re_single_line_field.match(line)
if slf:
field = slf.groups()[0].lower()
changes[field] = slf.groups()[1]
first = 1
continue
if line == " .":
changes[field] += '\n'
continue
mlf = re_multi_line_field.match(line)
if mlf:
if first == -1:
raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
if first == 1 and changes[field] != "":
changes[field] += '\n'
first = 0
changes[field] += mlf.groups()[0] + '\n'
continue
error += line
changes["filecontents"] = armored_contents
if changes.has_key("source"):
# Strip the source version in brackets from the source field,
# put it in the "source-version" field instead.
srcver = re_srchasver.search(changes["source"])
if srcver:
changes["source"] = srcver.group(1)
changes["source-version"] = srcver.group(2)
if error:
raise ParseChangesError(error)
return changes
################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
"""
Parses a changes file and returns a dictionary where each field is a
key. The mandatory first argument is the filename of the .changes
file.
signing_rules is an optional argument:
- If signing_rules == -1, no signature is required.
- If signing_rules == 0 (the default), a signature is required.
- If signing_rules == 1, it turns on the same strict format checking
as dpkg-source.
The rules for (signing_rules == 1)-mode are:
- The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
followed by any PGP header data and must end with a blank line.
- The data section must end with a blank line and must be followed by
"-----BEGIN PGP SIGNATURE-----".
"""
changes_in = open_file(filename)
content = changes_in.read()
changes_in.close()
try:
unicode(content, 'utf-8')
except UnicodeError:
raise ChangesUnicodeError("Changes file not proper utf-8")
changes = parse_deb822(content, signing_rules, keyrings=keyrings)
if not dsc_file:
# Finally ensure that everything needed for .changes is there
must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
missingfields=[]
for keyword in must_keywords:
if not changes.has_key(keyword.lower()):
missingfields.append(keyword)
if len(missingfields):
            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
return changes
################################################################################
def hash_key(hashname):
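    """Return the dict key used for a hash, e.g. hash_key("sha1") == "sha1sum"."""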
return '%ssum' % hashname
################################################################################
def create_hash(where, files, hashname, hashfunc):
"""
create_hash extends the passed files dict with the given hash by
iterating over all files on disk and passing them to the hashing
function given.
"""
rejmsg = []
for f in files.keys():
try:
file_handle = open_file(f)
except CantOpenError:
rejmsg.append("Could not open file %s for checksumming" % (f))
continue
files[f][hash_key(hashname)] = hashfunc(file_handle)
file_handle.close()
return rejmsg
################################################################################
def check_hash(where, files, hashname, hashfunc):
"""
check_hash checks the given hash in the files dict against the actual
files on disk. The hash values need to be present consistently in
all file entries. It does not modify its input in any way.
"""
rejmsg = []
for f in files.keys():
file_handle = None
try:
try:
file_handle = open_file(f)
# Check for the hash entry, to not trigger a KeyError.
if not files[f].has_key(hash_key(hashname)):
rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
where))
continue
# Actually check the hash for correctness.
if hashfunc(file_handle) != files[f][hash_key(hashname)]:
rejmsg.append("%s: %s check failed in %s" % (f, hashname,
where))
except CantOpenError:
# TODO: This happens when the file is in the pool.
# warn("Cannot open file %s" % f)
continue
finally:
if file_handle:
file_handle.close()
return rejmsg
################################################################################
def check_size(where, files):
"""
check_size checks the file sizes in the passed files dict against the
files on disk.
"""
rejmsg = []
for f in files.keys():
try:
entry = os.stat(f)
except OSError as exc:
if exc.errno == 2:
# TODO: This happens when the file is in the pool.
continue
raise
actual_size = entry[stat.ST_SIZE]
size = int(files[f]["size"])
if size != actual_size:
rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
% (f, actual_size, size, where))
return rejmsg
################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
"""
Verify that the files listed in the Files field of the .dsc are
those expected given the announced Format.
@type dsc_filename: string
@param dsc_filename: path of .dsc file
@type dsc: dict
@param dsc: the content of the .dsc parsed by C{parse_changes()}
@type dsc_files: dict
@param dsc_files: the file list returned by C{build_file_list()}
@rtype: list
@return: all errors detected
"""
rejmsg = []
# Parse the file if needed
if dsc is None:
        dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1)
if dsc_files is None:
dsc_files = build_file_list(dsc, is_a_dsc=1)
# Ensure .dsc lists proper set of source files according to the format
# announced
has = defaultdict(lambda: 0)
ftype_lookup = (
(r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
(r'diff.gz', ('debian_diff',)),
(r'tar.gz', ('native_tar_gz', 'native_tar')),
(r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
(r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
(r'tar\.(gz|bz2|xz)', ('native_tar',)),
(r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
)
for f in dsc_files.keys():
m = re_issource.match(f)
if not m:
rejmsg.append("%s: %s in Files field not recognised as source."
% (dsc_filename, f))
continue
# Populate 'has' dictionary by resolving keys in lookup table
matched = False
for regex, keys in ftype_lookup:
if re.match(regex, m.group(3)):
matched = True
for key in keys:
has[key] += 1
break
# File does not match anything in lookup table; reject
if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
# Check for multiple files
for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
if has[file_type] > 1:
rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
# Source format specific tests
try:
format = get_format_from_string(dsc['format'])
rejmsg.extend([
'%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
])
except UnknownFormatError:
# Not an error here for now
pass
return rejmsg
################################################################################
def check_hash_fields(what, manifest):
"""
check_hash_fields ensures that there are no checksum fields in the
given dict that we do not know about.
"""
rejmsg = []
hashes = map(lambda x: x[0], known_hashes)
for field in manifest:
if field.startswith("checksums-"):
hashname = field.split("-",1)[1]
if hashname not in hashes:
rejmsg.append("Unsupported checksum field for %s "\
"in %s" % (hashname, what))
return rejmsg
################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
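    """Check the given hash for each file against the .changes when the
    changes format already carries it, otherwise compute it from disk."""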
if format >= version:
# The version should contain the specified hash.
func = check_hash
# Import hashes from the changes
rejmsg = parse_checksums(".changes", files, changes, hashname)
if len(rejmsg) > 0:
return rejmsg
else:
# We need to calculate the hash because it can't possibly
# be in the file.
func = create_hash
return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.<|fim▁hole|> ensure_dsc_hashes' task is to ensure that each and every *present* hash
in the dsc is correct, i.e. identical to the changes file and if necessary
the pool. The latter task is delegated to check_hash.
"""
rejmsg = []
if not dsc.has_key('Checksums-%s' % (hashname,)):
return rejmsg
# Import hashes from the dsc
parse_checksums(".dsc", dsc_files, dsc, hashname)
# And check it...
rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
return rejmsg
################################################################################
def parse_checksums(where, files, manifest, hashname):
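    """Import checksums-<hashname> entries from manifest into files,
    verifying that sizes agree and that every file has an entry."""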
rejmsg = []
field = 'checksums-%s' % hashname
if not field in manifest:
return rejmsg
for line in manifest[field].split('\n'):
if not line:
break
clist = line.strip().split(' ')
if len(clist) == 3:
checksum, size, checkfile = clist
else:
rejmsg.append("Cannot parse checksum line [%s]" % (line))
continue
if not files.has_key(checkfile):
# TODO: check for the file's entry in the original files dict, not
# the one modified by (auto)byhand and other weird stuff
# rejmsg.append("%s: not present in files but in checksums-%s in %s" %
# (file, hashname, where))
continue
if not files[checkfile]["size"] == size:
rejmsg.append("%s: size differs for files and checksums-%s entry "\
"in %s" % (checkfile, hashname, where))
continue
files[checkfile][hash_key(hashname)] = checksum
for f in files.keys():
if not files[f].has_key(hash_key(hashname)):
rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
return rejmsg
################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
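    """Parse a Files-style field ("<hash> <size> [<section> <priority>]
    <name>" per line) into a dict keyed by filename."""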
files = {}
# Make sure we have a Files: field to parse...
if not changes.has_key(field):
raise NoFilesFieldError
# Validate .changes Format: field
if not is_a_dsc:
validate_changes_format(parse_format(changes['format']), field)
includes_section = (not is_a_dsc) and field == "files"
# Parse each entry/line:
for i in changes[field].split('\n'):
if not i:
break
s = i.split()
section = priority = ""
try:
if includes_section:
(md5, size, section, priority, name) = s
else:
(md5, size, name) = s
except ValueError:
raise ParseChangesError(i)
if section == "":
section = "-"
if priority == "":
priority = "-"
(section, component) = extract_component_from_section(section)
files[name] = dict(size=size, section=section,
priority=priority, component=component)
files[name][hashname] = md5
return files
################################################################################
# see http://bugs.debian.org/619131
def build_package_list(dsc, session = None):
if not dsc.has_key("package-list"):
return {}
packages = {}
for line in dsc["package-list"].split("\n"):
if not line:
break
fields = line.split()
name = fields[0]
package_type = fields[1]
(section, component) = extract_component_from_section(fields[2])
priority = fields[3]
# Validate type if we have a session
if session and get_override_type(package_type, session) is None:
# Maybe just warn and ignore? exit(1) might be a bit hard...
            fubar("invalid type (%s) in Package-List." % (package_type))
if name not in packages or packages[name]["type"] == "dsc":
packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
return packages
################################################################################
def send_mail (message, filename=""):
"""sendmail wrapper, takes _either_ a message string or a file as arguments"""
# Check whether we're supposed to be sending mail
if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
return
# If we've been passed a string dump it into a temporary file
if message:
(fd, filename) = tempfile.mkstemp()
os.write (fd, message)
os.close (fd)
if Cnf.has_key("Dinstall::MailWhiteList") and \
Cnf["Dinstall::MailWhiteList"] != "":
message_in = open_file(filename)
message_raw = modemail.message_from_file(message_in)
        message_in.close()
        whitelist = []
whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
try:
for line in whitelist_in:
if not re_whitespace_comment.match(line):
if re_re_mark.match(line):
whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
else:
whitelist.append(re.compile(re.escape(line.strip())))
finally:
whitelist_in.close()
# Fields to check.
fields = ["To", "Bcc", "Cc"]
for field in fields:
# Check each field
value = message_raw.get(field, None)
if value != None:
                match = []
for item in value.split(","):
(rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
mail_whitelisted = 0
for wr in whitelist:
if wr.match(email):
mail_whitelisted = 1
break
if not mail_whitelisted:
print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
continue
match.append(item)
# Doesn't have any mail in whitelist so remove the header
if len(match) == 0:
del message_raw[field]
else:
message_raw.replace_header(field, ', '.join(match))
# Change message fields in order if we don't have a To header
if not message_raw.has_key("To"):
fields.reverse()
for field in fields:
if message_raw.has_key(field):
message_raw[fields[-1]] = message_raw[field]
del message_raw[field]
break
else:
# Clean up any temporary files
# and return, as we removed all recipients.
if message:
                    os.unlink(filename)
                return
        fd = os.open(filename, os.O_RDWR | os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)
# Invoke sendmail
(result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
if (result != 0):
raise SendmailFailedError(output)
# Clean up any temporary files
if message:
os.unlink (filename)
################################################################################
def poolify (source, component):
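    """Return the pool subdirectory path for a source package.

    For example, poolify("dak", "main") -> "main/d/dak/" and
    poolify("libfoo", "main") -> "main/libf/libfoo/".
    """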
if component:
component += '/'
if source[:3] == "lib":
return component + source[:4] + '/' + source + '/'
else:
return component + source[:1] + '/' + source + '/'
################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
if os.path.exists(dest) and os.path.isdir(dest):
dest_dir = dest
else:
dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
umask = os.umask(00000)
os.makedirs(dest_dir, 0o2775)
os.umask(umask)
#print "Moving %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
dest += '/' + os.path.basename(src)
# Don't overwrite unless forced to
if os.path.exists(dest):
if not overwrite:
fubar("Can't move %s to %s - file already exists." % (src, dest))
else:
if not os.access(dest, os.W_OK):
fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
shutil.copy2(src, dest)
os.chmod(dest, perms)
os.unlink(src)
def copy (src, dest, overwrite = 0, perms = 0o664):
if os.path.exists(dest) and os.path.isdir(dest):
dest_dir = dest
else:
dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
umask = os.umask(00000)
os.makedirs(dest_dir, 0o2775)
os.umask(umask)
#print "Copying %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
dest += '/' + os.path.basename(src)
# Don't overwrite unless forced to
if os.path.exists(dest):
if not overwrite:
raise FileExistsError
else:
if not os.access(dest, os.W_OK):
raise CantOverwriteError
shutil.copy2(src, dest)
os.chmod(dest, perms)
################################################################################
def where_am_i ():
res = socket.getfqdn()
database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
if database_hostname:
return database_hostname
else:
return res
def which_conf_file ():
if os.getenv('DAK_CONFIG'):
return os.getenv('DAK_CONFIG')
res = socket.getfqdn()
# In case we allow local config files per user, try if one exists
if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "etc/dak.conf")
if os.path.exists(confpath):
apt_pkg.ReadConfigFileISC(Cnf,confpath)
# We are still in here, so there is no local config file or we do
# not allow local files. Do the normal stuff.
if Cnf.get("Config::" + res + "::DakConfig"):
return Cnf["Config::" + res + "::DakConfig"]
return default_config
def which_apt_conf_file ():
res = socket.getfqdn()
# In case we allow local config files per user, try if one exists
if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf, confpath)
if Cnf.get("Config::" + res + "::AptConfig"):
return Cnf["Config::" + res + "::AptConfig"]
else:
return default_apt_config
def which_alias_file():
hostname = socket.getfqdn()
aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
if os.path.exists(aliasfn):
return aliasfn
else:
return None
################################################################################
def TemplateSubst(subst_map, filename):
""" Perform a substition of template """
templatefile = open_file(filename)
template = templatefile.read()
for k, v in subst_map.iteritems():
template = template.replace(k, str(v))
templatefile.close()
return template
################################################################################
def fubar(msg, exit_code=1):
sys.stderr.write("E: %s\n" % (msg))
sys.exit(exit_code)
def warn(msg):
sys.stderr.write("W: %s\n" % (msg))
################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
def getusername ():
return pwd.getpwuid(os.getuid())[0]
################################################################################
def size_type (c):
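    """Format a byte count for humans, e.g. size_type(2048) == "2048 B"
    and size_type(20480) == "20 KB"."""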
t = " B"
if c > 10240:
c = c / 1024
t = " KB"
if c > 10240:
c = c / 1024
t = " MB"
return ("%d%s" % (c, t))
################################################################################
def cc_fix_changes (changes):
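    """Rewrite the space-separated Architecture field as a dict of arch -> 1."""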
o = changes.get("architecture", "")
if o:
del changes["architecture"]
changes["architecture"] = {}
for j in o.split():
changes["architecture"][j] = 1
def changes_compare (a, b):
""" Sort by source name, source version, 'have source', and then by filename """
try:
a_changes = parse_changes(a)
except:
return -1
try:
b_changes = parse_changes(b)
except:
return 1
cc_fix_changes (a_changes)
cc_fix_changes (b_changes)
# Sort by source name
a_source = a_changes.get("source")
b_source = b_changes.get("source")
q = cmp (a_source, b_source)
if q:
return q
# Sort by source version
a_version = a_changes.get("version", "0")
b_version = b_changes.get("version", "0")
q = apt_pkg.version_compare(a_version, b_version)
if q:
return q
# Sort by 'have source'
a_has_source = a_changes["architecture"].get("source")
b_has_source = b_changes["architecture"].get("source")
if a_has_source and not b_has_source:
return -1
elif b_has_source and not a_has_source:
return 1
# Fall back to sort by filename
return cmp(a, b)
################################################################################
def find_next_free (dest, too_many=100):
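    """Return dest unchanged if it is free, otherwise the first free
    "dest.N"; raises NoFreeFilenameError after too_many attempts."""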
extra = 0
orig_dest = dest
while os.path.exists(dest) and extra < too_many:
dest = orig_dest + '.' + repr(extra)
extra += 1
if extra >= too_many:
raise NoFreeFilenameError
return dest
################################################################################
def result_join (original, sep = '\t'):
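    """Join a sequence with sep (TAB by default), rendering None entries
    as empty strings."""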
resultlist = []
for i in xrange(len(original)):
if original[i] == None:
resultlist.append("")
else:
resultlist.append(original[i])
return sep.join(resultlist)
################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
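    """Prefix each stripped line of str with prefix, dropping blank lines
    unless include_blank_lines is set."""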
out = ""
for line in str.split('\n'):
line = line.strip()
if line or include_blank_lines:
out += "%s%s\n" % (prefix, line)
# Strip trailing new line
if out:
out = out[:-1]
return out
################################################################################
def validate_changes_file_arg(filename, require_changes=1):
"""
'filename' is either a .changes or .dak file. If 'filename' is a
.dak file, it's changed to be the corresponding .changes file. The
function then checks if the .changes file a) exists and b) is
readable and returns the .changes filename if so. If there's a
problem, the next action depends on the option 'require_changes'
argument:
- If 'require_changes' == -1, errors are ignored and the .changes
filename is returned.
- If 'require_changes' == 0, a warning is given and 'None' is returned.
- If 'require_changes' == 1, a fatal error is raised.
"""
error = None
orig_filename = filename
if filename.endswith(".dak"):
filename = filename[:-4]+".changes"
if not filename.endswith(".changes"):
error = "invalid file type; not a changes file"
else:
if not os.access(filename,os.R_OK):
if os.path.exists(filename):
error = "permission denied"
else:
error = "file not found"
if error:
if require_changes == 1:
fubar("%s: %s." % (orig_filename, error))
elif require_changes == 0:
warn("Skipping %s - %s" % (orig_filename, error))
return None
else: # We only care about the .dak file
return filename
else:
return filename
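# Behaviour sketch (illustrative): a .dak argument is first mapped to its
# .changes twin, so validate_changes_file_arg("foo_1.0_amd64.dak") checks
# and returns "foo_1.0_amd64.changes" if that file exists and is readable.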
################################################################################
def real_arch(arch):
return (arch != "source" and arch != "all")
################################################################################
def join_with_commas_and(list):
if len(list) == 0: return "nothing"
if len(list) == 1: return list[0]
return ", ".join(list[:-1]) + " and " + list[-1]
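# Usage sketch (illustrative):
#   join_with_commas_and(["i386", "amd64", "armel"]) -> "i386, amd64 and armel"
#   join_with_commas_and([]) -> "nothing"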
################################################################################
def pp_deps (deps):
pp_deps = []
for atom in deps:
(pkg, version, constraint) = atom
if constraint:
pp_dep = "%s (%s %s)" % (pkg, constraint, version)
else:
pp_dep = pkg
pp_deps.append(pp_dep)
return " |".join(pp_deps)
################################################################################
def get_conf():
return Cnf
################################################################################
def parse_args(Options):
""" Handle -a, -c and -s arguments; returns them as SQL constraints """
# XXX: This should go away and everything which calls it be converted
# to use SQLA properly. For now, we'll just fix it not to use
# the old Pg interface though
session = DBConn().session()
# Process suite
if Options["Suite"]:
suite_ids_list = []
for suitename in split_args(Options["Suite"]):
suite = get_suite(suitename, session=session)
if not suite or suite.suite_id is None:
warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
else:
suite_ids_list.append(suite.suite_id)
if suite_ids_list:
con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
else:
fubar("No valid suite given.")
else:
con_suites = ""
# Process component
if Options["Component"]:
component_ids_list = []
for componentname in split_args(Options["Component"]):
component = get_component(componentname, session=session)
if component is None:
warn("component '%s' not recognised." % (componentname))
else:
component_ids_list.append(component.component_id)
if component_ids_list:
con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
else:
fubar("No valid component given.")
else:
con_components = ""
# Process architecture
con_architectures = ""
check_source = 0
if Options["Architecture"]:
arch_ids_list = []
for archname in split_args(Options["Architecture"]):
if archname == "source":
check_source = 1
else:
arch = get_architecture(archname, session=session)
if arch is None:
warn("architecture '%s' not recognised." % (archname))
else:
arch_ids_list.append(arch.arch_id)
if arch_ids_list:
con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
else:
if not check_source:
fubar("No valid architecture given.")
else:
check_source = 1
return (con_suites, con_architectures, con_components, check_source)
################################################################################
def arch_compare_sw (a, b):
"""
Function for use in sorting lists of architectures.
Sorts normally except that 'source' dominates all others.
"""
if a == "source" and b == "source":
return 0
elif a == "source":
return -1
elif b == "source":
return 1
return cmp (a, b)
################################################################################
def split_args (s, dwim=1):
"""
Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about strings ending
    in a comma, since this usually means someone did 'dak ls -a i386, m68k
foo' or something and the inevitable confusion resulting from 'm68k'
being treated as an argument is undesirable.
"""
if s.find(",") == -1:
return s.split()
else:
if s[-1:] == "," and dwim:
fubar("split_args: found trailing comma, spurious space maybe?")
return s.split(",")
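# Usage sketch (illustrative): comma- and whitespace-separated input behave
# the same, but whitespace is preserved when splitting on commas:
#   split_args("i386 amd64") -> ['i386', 'amd64']
#   split_args("i386,amd64") -> ['i386', 'amd64']
#   split_args("i386, amd64") -> ['i386', ' amd64']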
################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
"""
    Our very own version of commands.getstatusoutput(), hacked to support
gpgv's status fd.
"""
cmd = ['/bin/sh', '-c', cmd]
p2cread, p2cwrite = os.pipe()
c2pread, c2pwrite = os.pipe()
errout, errin = os.pipe()
pid = os.fork()
if pid == 0:
# Child
os.close(0)
os.close(1)
os.dup(p2cread)
os.dup(c2pwrite)
os.close(2)
os.dup(errin)
for i in range(3, 256):
if i != status_write:
try:
os.close(i)
except:
pass
try:
os.execvp(cmd[0], cmd)
finally:
os._exit(1)
# Parent
os.close(p2cread)
os.dup2(c2pread, c2pwrite)
os.dup2(errout, errin)
output = status = ""
while 1:
i, o, e = select.select([c2pwrite, errin, status_read], [], [])
more_data = []
for fd in i:
r = os.read(fd, 8196)
if len(r) > 0:
more_data.append(fd)
if fd == c2pwrite or fd == errin:
output += r
elif fd == status_read:
status += r
else:
fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
if not more_data:
pid, exit_status = os.waitpid(pid, 0)
try:
os.close(status_write)
os.close(status_read)
os.close(c2pread)
os.close(c2pwrite)
os.close(p2cwrite)
os.close(errin)
os.close(errout)
except:
pass
break
return output, status, exit_status
################################################################################
def process_gpgv_output(status):
# Process the status-fd output
keywords = {}
internal_error = ""
for line in status.split('\n'):
line = line.strip()
if line == "":
continue
split = line.split()
if len(split) < 2:
internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
continue
(gnupg, keyword) = split[:2]
if gnupg != "[GNUPG:]":
internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
continue
args = split[2:]
if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
internal_error += "found duplicate status token ('%s').\n" % (keyword)
continue
else:
keywords[keyword] = args
return (keywords, internal_error)
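# Parsing sketch (illustrative): a status-fd line such as
#   [GNUPG:] VALIDSIG 0123ABCD 2010-01-01 ...
# ends up as keywords["VALIDSIG"] = ["0123ABCD", "2010-01-01", ...];
# malformed or unexpectedly duplicated lines are reported via internal_error.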
################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
"""
Retrieve the key that signed 'filename' from 'keyserver' and
add it to 'keyring'. Returns nothing on success, or an error message
on error.
"""
# Defaults for keyserver and keyring
if not keyserver:
keyserver = Cnf["Dinstall::KeyServer"]
if not keyring:
keyring = get_primary_keyring_path()
# Ensure the filename contains no shell meta-characters or other badness
if not re_taint_free.match(filename):
return "%s: tainted filename" % (filename)
# Invoke gpgv on the file
status_read, status_write = os.pipe()
cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
(_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
# Process the status-fd output
(keywords, internal_error) = process_gpgv_output(status)
if internal_error:
return internal_error
if not keywords.has_key("NO_PUBKEY"):
return "didn't find expected NO_PUBKEY in gpgv status-fd output"
fingerprint = keywords["NO_PUBKEY"][0]
# XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
# it'll try to create a lockfile in /dev. A better solution might
# be a tempfile or something.
cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
% (Cnf["Dinstall::SigningKeyring"])
cmd += " --keyring %s --keyserver %s --recv-key %s" \
% (keyring, keyserver, fingerprint)
(result, output) = commands.getstatusoutput(cmd)
if (result != 0):
return "'%s' failed with exit code %s" % (cmd, result)
return ""
################################################################################
def gpg_keyring_args(keyrings=None):
if not keyrings:
keyrings = get_active_keyring_paths()
return " ".join(["--keyring %s" % x for x in keyrings])
################################################################################
@session_wrapper
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
"""
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is
    optional and is the name of the file the detached signature applies
    to. The third argument is an optional *list* of keyrings to use; if
    none is given, the active keyrings from the database are used.
    'autofetch' can either be None, True or False. If None, the default
    behaviour specified in the config will be used. Returns a tuple of
    (fingerprint or None, list of rejection messages).
"""
rejects = []
# Ensure the filename contains no shell meta-characters or other badness
if not re_taint_free.match(sig_filename):
rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
return (None, rejects)
if data_filename and not re_taint_free.match(data_filename):
rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
return (None, rejects)
if not keyrings:
keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]
# Autofetch the signing key if that's enabled
if autofetch == None:
autofetch = Cnf.get("Dinstall::KeyAutoFetch")
if autofetch:
error_msg = retrieve_key(sig_filename)
if error_msg:
rejects.append(error_msg)
return (None, rejects)
# Build the command line
status_read, status_write = os.pipe()
cmd = "gpgv --status-fd %s %s %s %s" % (
status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
# Invoke gpgv on the file
(output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
# Process the status-fd output
(keywords, internal_error) = process_gpgv_output(status)
# If we failed to parse the status-fd output, let's just whine and bail now
if internal_error:
rejects.append("internal error while performing signature check on %s." % (sig_filename))
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
return (None, rejects)
# Now check for obviously bad things in the processed output
if keywords.has_key("KEYREVOKED"):
rejects.append("The key used to sign %s has been revoked." % (sig_filename))
if keywords.has_key("BADSIG"):
rejects.append("bad signature on %s." % (sig_filename))
if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
rejects.append("failed to check signature on %s." % (sig_filename))
if keywords.has_key("NO_PUBKEY"):
args = keywords["NO_PUBKEY"]
if len(args) >= 1:
key = args[0]
rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
if keywords.has_key("BADARMOR"):
rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
if keywords.has_key("NODATA"):
rejects.append("no signature found in %s." % (sig_filename))
if keywords.has_key("EXPKEYSIG"):
args = keywords["EXPKEYSIG"]
if len(args) >= 1:
key = args[0]
rejects.append("Signature made by expired key 0x%s" % (key))
if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
args = keywords["KEYEXPIRED"]
expiredate=""
if len(args) >= 1:
timestamp = args[0]
if timestamp.count("T") == 0:
try:
expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
except ValueError:
expiredate = "unknown (%s)" % (timestamp)
else:
expiredate = timestamp
rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
if len(rejects) > 0:
return (None, rejects)
# Next check gpgv exited with a zero return code
if exit_status:
rejects.append("gpgv failed while checking %s." % (sig_filename))
if status.strip():
rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
else:
rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
return (None, rejects)
# Sanity check the good stuff we expect
if not keywords.has_key("VALIDSIG"):
rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
else:
args = keywords["VALIDSIG"]
if len(args) < 1:
rejects.append("internal error while checking signature on %s." % (sig_filename))
else:
fingerprint = args[0]
if not keywords.has_key("GOODSIG"):
rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
if not keywords.has_key("SIG_ID"):
rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
# Finally ensure there's not something we don't recognise
known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")
for keyword in keywords.keys():
if not known_keywords.has_key(keyword):
rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
if len(rejects) > 0:
return (None, rejects)
else:
return (fingerprint, [])
################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
addresses = key_uid_email_cache.get(fingerprint)
if addresses != None:
return addresses
addresses = list()
cmd = "gpg --no-default-keyring %s --fingerprint %s" \
% (gpg_keyring_args(), fingerprint)
(result, output) = commands.getstatusoutput(cmd)
if result == 0:
for l in output.split('\n'):
m = re_gpg_uid.match(l)
if m:
addresses.append(m.group(1))
key_uid_email_cache[fingerprint] = addresses
return addresses
################################################################################
def clean_symlink (src, dest, root):
"""
Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
Returns fixed 'src'
"""
src = src.replace(root, '', 1)
dest = dest.replace(root, '', 1)
dest = os.path.dirname(dest)
new_src = '../' * len(dest.split('/'))
return new_src + src
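# Worked example (illustrative): with root "/srv/ftp/",
#   clean_symlink("/srv/ftp/pool/a/p.deb", "/srv/ftp/dists/sid/p.deb", "/srv/ftp/")
# strips the root from both paths and returns "../../pool/a/p.deb".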
################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
"""
Return a secure and unique filename by pre-creating it.
If 'directory' is non-null, it will be the directory the file is pre-created in.
If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
If 'suffix' is non-null, the filename will end with it.
Returns a pair (fd, name).
"""
return tempfile.mkstemp(suffix, prefix, directory)
################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
"""
Return a secure and unique directory by pre-creating it.
If 'parent' is non-null, it will be the directory the directory is pre-created in.
If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
If 'suffix' is non-null, the filename will end with it.
Returns a pathname to the new directory
"""
return tempfile.mkdtemp(suffix, prefix, parent)
################################################################################
def is_email_alias(email):
""" checks if the user part of the email is listed in the alias file """
global alias_cache
if alias_cache == None:
aliasfn = which_alias_file()
alias_cache = set()
if aliasfn:
for l in open(aliasfn):
alias_cache.add(l.split(':')[0])
uid = email.split('@')[0]
return uid in alias_cache
################################################################################
def get_changes_files(from_dir):
"""
Takes a directory and lists all .changes files in it (as well as chdir'ing
to the directory; this is due to broken behaviour on the part of p-u/p-a
when you're not in the right place)
Returns a list of filenames
"""
try:
# Much of the rest of p-u/p-a depends on being in the right place
os.chdir(from_dir)
changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
except OSError as e:
fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
return changes_files
################################################################################
apt_pkg.init()
Cnf = apt_pkg.Configuration()
if not os.getenv("DAK_TEST"):
apt_pkg.read_config_file_isc(Cnf,default_config)
if which_conf_file() != default_config:
apt_pkg.read_config_file_isc(Cnf,which_conf_file())
################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
"""
Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parses a local copy, but let's document the source
somewhere ;)
returns a dict associating source package name with a list of open wnpp
bugs (Yes, there might be more than one)
"""
line = []
try:
f = open(file)
lines = f.readlines()
except IOError as e:
print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
lines = []
wnpp = {}
for line in lines:
        split_line = line.split(": ", 1)
        if len(split_line) > 1:
            wnpp[split_line[0]] = split_line[1].split("|")
for source in wnpp.keys():
bugs = []
        for wnpp_bug in wnpp[source]:
            bug_match = re.search(r"\d+", wnpp_bug)
            if bug_match:
                bugs.append(bug_match.group())
wnpp[source] = bugs
return wnpp
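# Result sketch (illustrative): an input line "dak: 123456|654321" produces
# wnpp["dak"] == ["123456", "654321"] once the bug numbers are extracted.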
################################################################################
def get_packages_from_ftp(root, suite, component, architecture):
"""
Returns an object containing apt_pkg-parseable data collected by
aggregating Packages.gz files gathered for each architecture.
@type root: string
@param root: path to ftp archive root directory
@type suite: string
@param suite: suite to extract files from
@type component: string
@param component: component to extract files from
@type architecture: string
@param architecture: architecture to extract files from
@rtype: TagFile
@return: apt_pkg class containing package data
"""
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
(fd, temp_file) = temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
if (result != 0):
fubar("Gunzip invocation failed!\n%s\n" % (output), result)
filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
if os.path.exists(filename):
(result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
if (result != 0):
fubar("Gunzip invocation failed!\n%s\n" % (output), result)
packages = open_file(temp_file)
Packages = apt_pkg.ParseTagFile(packages)
os.unlink(temp_file)
return Packages
################################################################################
def deb_extract_control(fh):
"""extract DEBIAN/control from a binary package"""
return apt_inst.DebFile(fh).control.extractdata("control")
################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
"""Mail addresses to contact for an upload
Args:
maintainer (str): Maintainer field of the changes file
changed_by (str): Changed-By field of the changes file
fingerprint (str): Fingerprint of the PGP key used to sign the upload
Returns:
List of RFC 2047-encoded mail addresses to contact regarding this upload
"""
addresses = [maintainer]
if changed_by != maintainer:
addresses.append(changed_by)
fpr_addresses = gpg_get_key_addresses(fingerprint)
if fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
addresses.append(fpr_addresses[0])
encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
return encoded_addresses<|fim▁end|> |
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
""" |
<|file_name|>peer_test.go<|end_file_name|><|fim▁begin|>package peer
import (
"os/exec"
"testing"
"github.com/bmizerany/assert"
"github.com/soundcloud/doozer"
"github.com/zyxar/doozerd/store"
)
func TestDoozerNop(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
err := cl.Nop()
assert.Equal(t, nil, err)
}
func TestDoozerGet(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
_, err := cl.Set("/x", store.Missing, []byte{'a'})
assert.Equal(t, nil, err)
ents, rev, err := cl.Get("/x", nil)
assert.Equal(t, nil, err)
assert.NotEqual(t, store.Dir, rev)
assert.Equal(t, []byte{'a'}, ents)
//cl.Set("/test/a", store.Missing, []byte{'1'})
//cl.Set("/test/b", store.Missing, []byte{'2'})
//cl.Set("/test/c", store.Missing, []byte{'3'})
//ents, rev, err = cl.Get("/test", 0)
//sort.SortStrings(ents)
//assert.Equal(t, store.Dir, rev)
//assert.Equal(t, nil, err)
//assert.Equal(t, []string{"a", "b", "c"}, ents)
}
func TestDoozerSet(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
for i := byte(0); i < 10; i++ {
_, err := cl.Set("/x", store.Clobber, []byte{'0' + i})
assert.Equal(t, nil, err)
}
_, err := cl.Set("/x", 0, []byte{'X'})
assert.Equal(t, &doozer.Error{doozer.ErrOldRev, ""}, err)
}
func TestDoozerGetWithRev(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
rev1, err := cl.Set("/x", store.Missing, []byte{'a'})
assert.Equal(t, nil, err)
v, rev, err := cl.Get("/x", &rev1) // Use the snapshot.
assert.Equal(t, nil, err)
assert.Equal(t, rev1, rev)
assert.Equal(t, []byte{'a'}, v)
rev2, err := cl.Set("/x", rev, []byte{'b'})
assert.Equal(t, nil, err)
v, rev, err = cl.Get("/x", nil) // Read the new value.
assert.Equal(t, nil, err)
assert.Equal(t, rev2, rev)
assert.Equal(t, []byte{'b'}, v)
v, rev, err = cl.Get("/x", &rev1) // Read the saved value again.
assert.Equal(t, nil, err)
assert.Equal(t, rev1, rev)
assert.Equal(t, []byte{'a'}, v)
}
func TestDoozerWaitSimple(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
var rev int64 = 1
cl.Set("/test/foo", store.Clobber, []byte("bar"))
ev, err := cl.Wait("/test/**", rev)
assert.Equal(t, nil, err)
assert.Equal(t, "/test/foo", ev.Path)
assert.Equal(t, []byte("bar"), ev.Body)
assert.T(t, ev.IsSet())
rev = ev.Rev + 1
cl.Set("/test/fun", store.Clobber, []byte("house"))
ev, err = cl.Wait("/test/**", rev)
assert.Equal(t, nil, err)
assert.Equal(t, "/test/fun", ev.Path)
assert.Equal(t, []byte("house"), ev.Body)
assert.T(t, ev.IsSet())
rev = ev.Rev + 1
cl.Del("/test/foo", store.Clobber)
ev, err = cl.Wait("/test/**", rev)
assert.Equal(t, nil, err)
assert.Equal(t, "/test/foo", ev.Path)
assert.T(t, ev.IsDel())
}
func TestDoozerWaitWithRev(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
// Create some history
cl.Set("/test/foo", store.Clobber, []byte("bar"))
cl.Set("/test/fun", store.Clobber, []byte("house"))
ev, err := cl.Wait("/test/**", 1)
assert.Equal(t, nil, err)
assert.Equal(t, "/test/foo", ev.Path)
assert.Equal(t, []byte("bar"), ev.Body)
assert.T(t, ev.IsSet())
rev := ev.Rev + 1
ev, err = cl.Wait("/test/**", rev)
assert.Equal(t, nil, err)
assert.Equal(t, "/test/fun", ev.Path)
assert.Equal(t, []byte("house"), ev.Body)
assert.T(t, ev.IsSet())
}
func TestDoozerStat(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
cl.Set("/test/foo", store.Clobber, []byte("bar"))
setRev, _ := cl.Set("/test/fun", store.Clobber, []byte("house"))
ln, rev, err := cl.Stat("/test", nil)
assert.Equal(t, nil, err)
assert.Equal(t, store.Dir, rev)
assert.Equal(t, int(2), ln)
ln, rev, err = cl.Stat("/test/fun", nil)
assert.Equal(t, nil, err)
assert.Equal(t, setRev, rev)
assert.Equal(t, int(5), ln)
}
func TestDoozerGetdirOnDir(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
cl.Set("/test/a", store.Clobber, []byte("1"))
cl.Set("/test/b", store.Clobber, []byte("2"))
cl.Set("/test/c", store.Clobber, []byte("3"))
rev, err := cl.Rev()
if err != nil {
panic(err)
}
got, err := cl.Getdir("/test", rev, 0, -1)
assert.Equal(t, nil, err)
assert.Equal(t, []string{"a", "b", "c"}, got)
}
func TestDoozerGetdirOnFile(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
cl.Set("/test/a", store.Clobber, []byte("1"))
rev, err := cl.Rev()
if err != nil {
panic(err)
}
names, err := cl.Getdir("/test/a", rev, 0, -1)
assert.Equal(t, &doozer.Error{doozer.ErrNotDir, ""}, err)
assert.Equal(t, []string(nil), names)
}
func TestDoozerGetdirMissing(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
rev, err := cl.Rev()
if err != nil {
panic(err)<|fim▁hole|> assert.Equal(t, &doozer.Error{doozer.ErrNoEnt, ""}, err)
assert.Equal(t, []string(nil), names)
}
func TestDoozerGetdirOffsetLimit(t *testing.T) {
var (
l = mustListen()
u = mustListenUDP(l.Addr().String())
st = store.New(store.DefaultInitialRev)
)
defer l.Close()
defer u.Close()
go Main("a", "X", "", "", "", nil, u, l, nil, 1e9, 2e9, 3e9, 101, st)
cl := dial(l.Addr().String())
cl.Set("/test/a", store.Clobber, []byte("1"))
cl.Set("/test/b", store.Clobber, []byte("2"))
cl.Set("/test/c", store.Clobber, []byte("3"))
cl.Set("/test/d", store.Clobber, []byte("4"))
rev, err := cl.Rev()
if err != nil {
panic(err)
}
names, err := cl.Getdir("/test", rev, 1, 2)
assert.Equal(t, nil, err)
assert.Equal(t, []string{"b", "c"}, names)
}
// TODO(xla) Needs to be revised but likely obsolete since the short-circuit
// of consensus in 8a296df.
// func TestPeerShun(t *testing.T) {
// l0 := mustListen()
// defer l0.Close()
// a0 := l0.Addr().String()
// u0 := mustListenUDP(a0)
// defer u0.Close()
//
// l1 := mustListen()
// defer l1.Close()
// u1 := mustListenUDP(l1.Addr().String())
// defer u1.Close()
// l2 := mustListen()
// defer l2.Close()
// u2 := mustListenUDP(l2.Addr().String())
// defer u2.Close()
//
// go Main("a", "X", "", "", "", nil, u0, l0, nil, 1e8, 1e7, 1e9, 1e9, store.New(store.DefaultInitialRev))
// go Main("a", "Y", "", "", "", dial(a0), u1, l1, nil, 1e8, 1e7, 1e9, 1e9, store.New(store.DefaultInitialRev))
// go Main("a", "Z", "", "", "", dial(a0), u2, l2, nil, 1e8, 1e7, 1e9, 1e9, store.New(store.DefaultInitialRev))
//
// cl := dial(l0.Addr().String())
// cl.Set("/ctl/cal/1", store.Missing, nil)
// cl.Set("/ctl/cal/2", store.Missing, nil)
//
// waitFor(cl, "/ctl/node/X/writable")
// waitFor(cl, "/ctl/node/Y/writable")
// waitFor(cl, "/ctl/node/Z/writable")
//
// rev, err := cl.Set("/test", store.Clobber, nil)
// if e, ok := err.(*doozer.Error); ok && e.Err == doozer.ErrReadonly {
// } else if err != nil {
// panic(err)
// }
//
// u1.Close()
// for {
// ev, err := cl.Wait("/ctl/cal/*", rev)
// if err != nil {
// panic(err)
// }
// if ev.IsSet() && len(ev.Body) == 0 {
// break
// }
// rev = ev.Rev + 1
// }
// }
// TODO(xla) Needs to be revised but likely obsolete since the short-circuit
// of consensus in 8a296df.
// func TestPeerLateJoin(t *testing.T) {
// l0 := mustListen()
// defer l0.Close()
// a0 := l0.Addr().String()
// u0 := mustListenUDP(a0)
// defer u0.Close()
//
// l1 := mustListen()
// defer l1.Close()
// u1 := mustListenUDP(l1.Addr().String())
// defer u1.Close()
//
// go Main("a", "X", "", "", "", nil, u0, l0, nil, 1e8, 1e7, 1e9, 60, store.New(store.DefaultInitialRev))
//
// cl := dial(l0.Addr().String())
// waitFor(cl, "/ctl/node/X/writable")
//
// // TODO: this is set slightly higher than the hardcoded interval
// // at which a store is cleaned. Refactor that to be configurable
// // so we can drop this down to something reasonable
// time.Sleep(1100 * time.Millisecond)
//
// go Main("a", "Y", "", "", "", dial(a0), u1, l1, nil, 1e8, 1e7, 1e9, 60, store.New(store.DefaultInitialRev))
// rev, _ := cl.Set("/ctl/cal/1", store.Missing, nil)
// for {
// ev, err := cl.Wait("/ctl/node/Y/writable", rev)
// if err != nil {
// panic(err)
// }
// if ev.IsSet() && len(ev.Body) == 4 {
// break
// }
// rev = ev.Rev + 1
// }
// }
func assertDenied(t *testing.T, err error) {
assert.NotEqual(t, nil, err)
assert.Equal(t, doozer.ErrOther, err.(*doozer.Error).Err)
assert.Equal(t, "permission denied", err.(*doozer.Error).Detail)
}
func runDoozer(a ...string) *exec.Cmd {
path := "/home/kr/src/go/bin/doozerd"
args := append([]string{path}, a...)
c := exec.Command(path, args...)
if err := c.Run(); err != nil {
panic(err)
}
return c
}<|fim▁end|> | }
names, err := cl.Getdir("/not/here", rev, 0, -1) |
<|file_name|>ActivityCollector.java<|end_file_name|><|fim▁begin|>package com.example.mywechat.utils;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
/**
 * The ActivityCollector class manages all activities.
* @author dzhiqin
*
*/
public class ActivityCollector {
public static List<Activity> activities=new ArrayList<Activity>();
public static void addActivity(Activity activity){
activities.add(activity);
}
public static void removeActivity(Activity activity){
activities.remove(activity);
}
/**
     * Finish all activities.
*/
public static void finishAll(){
for(Activity activity:activities){
if(!activity.isFinishing()){
activity.finish();
}
}
}
public ActivityCollector() {
        // TODO Auto-generated constructor stub
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>cmilitwostateselect.cpp<|end_file_name|><|fim▁begin|>#include "cmilitwostateselect.h"
#include "ui_cmilitwostateselect.h"
#include "cengine.h"
#include "ctextout.h"
void CMili2McuController::DoUpdateLogicView(const CEngineModel *engine) {
if (engine->CurrentMcuType() == CEngineModel::MILI_MCU)
mView->UpdateLogicView(engine);
}
void CMili2McuController::DoUpdateMemoryView(const CEngineModel *engine) {
if (engine->CurrentMcuType() == CEngineModel::MILI_MCU)
mView->UpdateMemoryView(engine);
}
void CMili2McuController::DoUpdateHintsView(const CEngineModel *engine) {
if (engine->CurrentMcuType() == CEngineModel::MILI_MCU)
mView->UpdateHintsView(engine);
}
// -------------------------------------------------------------
CMiliTwoStateSelect::CMiliTwoStateSelect(QWidget *parent) :<|fim▁hole|> ui->setupUi(this);
}
CMiliTwoStateSelect::~CMiliTwoStateSelect()
{
delete ui;
}
void CMiliTwoStateSelect::UpdateMemoryView(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
UpdateRG(engine);
}
void CMiliTwoStateSelect::UpdateLogicView(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
UpdateMS1(engine);
UpdateMS2(engine);
UpdateY(engine);
}
void CMiliTwoStateSelect::UpdateHintsView(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
ui->mRgMsbNoHint->setText(CTextOut::FormatDec(engine->StateDim() - 1));
ui->mYDimHint->setText(CTextOut::FormatDec(engine->McuControlOutputDim()));
ui->mMs1MsbHint->setText(QString("p%1").arg(CTextOut::FormatDec(engine->McuControlInputDim() - 1)));
ui->mMs2MsbHint->setText(CTextOut::FormatDec(engine->McuControlOutputDim() + engine->StateDim() - 1));
}
void CMiliTwoStateSelect::UpdateRG(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
const CRegister *rg = engine->CurrentMcu()->Register(CMiliAutomate::STATE_REGISTER_INDEX);
unsigned int stateDim = engine->StateDim();
ui->mRgVal->setText(CTextOut::FormatHex(rg->Output(), stateDim));
ui->mSVal->setText(CTextOut::FormatHex(rg->Output(), stateDim));
}
void CMiliTwoStateSelect::UpdateMS1(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
const CMultiplexor *mux = engine->CurrentMcu()->Multiplexor(CMiliAutomate::GROUP_MUX_INDEX);
ui->mMS1P0Val->setText(CTextOut::FormatBin(mux->Input(0), 1));
ui->mMS1P1Val->setText(CTextOut::FormatBin(mux->Input(1), 1));
unsigned int lastIndex = engine->McuControlInputDim() - 1;
ui->mMS1PnVal->setText(CTextOut::FormatBin(mux->Input(lastIndex).AsInt(), 1));
ui->mMs1SelVal->setText(CTextOut::FormatHex(mux->InputIndex(), mux->IndexDim()));
ui->mMs1Val->setText(CTextOut::FormatBin(mux->Output().AsInt(), 1));
}
void CMiliTwoStateSelect::UpdateMS2(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
const CMultiplexor *mux = engine->CurrentMcu()->Multiplexor(CMiliAutomate::STATE_MUX_INDEX);
ui->mMS2S0Val->setText(CTextOut::FormatHex(mux->Input(0)));
ui->mMS2S1Val->setText(CTextOut::FormatHex(mux->Input(1)));
ui->mMs2SVal->setText(CTextOut::FormatHex(mux->Output()));
mux = engine->CurrentMcu()->Multiplexor(CMiliAutomate::CONTROL_MUX_INDEX);
ui->mMS2Y0Val->setText(CTextOut::FormatHex(mux->Input(0)));
ui->mMS2Y1Val->setText(CTextOut::FormatHex(mux->Input(1)));
ui->mMs2YVal->setText(CTextOut::FormatHex(mux->Output()));
}
void CMiliTwoStateSelect::UpdateY(const CEngineModel *engine) {
Q_ASSERT(engine != 0);
Q_ASSERT(engine->CurrentMcuType() == CEngineModel::MILI_MCU);
ui->mYVal->setText(CTextOut::FormatHex(engine->CurrentMcu()->ControlOutput()));
}<|fim▁end|> | QWidget(parent),
ui(new Ui::CMiliTwoStateSelect),
mMcuController(this)
{ |
<|file_name|>PriceData.java<|end_file_name|><|fim▁begin|>package info.faceland.loot.data;
public class PriceData {
private int price;
private boolean rare;
public PriceData (int price, boolean rare) {
this.price = price;<|fim▁hole|> public int getPrice() {
return price;
}
public void setPrice(int price) {
this.price = price;
}
public boolean isRare() {
return rare;
}
public void setRare(boolean rare) {
this.rare = rare;
}
}<|fim▁end|> | this.rare = rare;
}
|
<|file_name|>dbtest.go<|end_file_name|><|fim▁begin|>package dbtest
import (
"database/sql"
"flag"
"os"
"testing"
)
var DatabaseURL = flag.String("db.url", getenv("TEST_DATABASE_URL", "postgres://localhost/empire?sslmode=disable"), "A connection string where a postgres instance is running.")<|fim▁hole|> db, err := sql.Open("postgres", *DatabaseURL)
if err != nil {
t.Fatal(err)
}
return db
}
func getenv(key, fallback string) string {
v, ok := os.LookupEnv(key)
if ok {
return v
}
return fallback
}<|fim▁end|> |
func Open(t testing.TB) *sql.DB { |
<|file_name|>svf_dpi.cpp<|end_file_name|><|fim▁begin|>/*
* svf_dpi.cpp
*
* Created on: Jan 7, 2014
* Author: ballance
*/
#include "svf_runtest.h"<|fim▁hole|>{
if (name == 0 || *name == 0) {
        name = 0; // assignment intended; '==' here was a no-op comparison
}
svf_runtest(name);
return 0;
}<|fim▁end|> |
extern "C" int svf_dpi_run_test(const char *name);
int svf_dpi_run_test(const char *name) |
<|file_name|>using_module_exports_b.js<|end_file_name|><|fim▁begin|><|fim▁hole|>module.exports = {
b: function() {
return A.a()
}
};<|fim▁end|> | A = require('./using_module_exports_a');
|
<|file_name|>SessionKind.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package delsh.livy;
/**
* The enumeration types for session kind.
*/
public enum SessionKind {
// Possible values are the following.
SPARK("spark"),
PYSPARK("pyspark"),<|fim▁hole|>
private String kind;
private SessionKind(String skind) {
kind = skind;
}
public String toString() {
return kind;
}
/**
     * This method finds the enum value that is equivalent to a given string.
     * @param str Session kind string.
* @return Enum value
*/
public static SessionKind getEnum(String str) {
SessionKind[] array = SessionKind.values();
for(SessionKind enumStr : array) {
if(str.equals(enumStr.kind.toString())) {
return enumStr;
}
}
return null;
}
}<|fim▁end|> | SPARKR("sparkr"); |
<|file_name|>IgnorePlugin.js<|end_file_name|><|fim▁begin|>/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
class IgnorePlugin {
constructor(resourceRegExp, contextRegExp) {
this.resourceRegExp = resourceRegExp;
this.contextRegExp = contextRegExp;
this.checkIgnore = this.checkIgnore.bind(this);
}
/*
* Only returns true if a "resourceRegExp" exists
* and the resource given matches the regexp.
*/
checkResource(resource) {
if(!this.resourceRegExp) {
return false;
}
return this.resourceRegExp.test(resource);
}
/*
* Returns true if contextRegExp does not exist
* or if context matches the given regexp.
*/
checkContext(context) {
if(!this.contextRegExp) {
return true;
}
return this.contextRegExp.test(context);
}
/*
* Returns true if result should be ignored.
* false if it shouldn't.
*
* Not that if "contextRegExp" is given, both the "resourceRegExp"
* and "contextRegExp" have to match.
*/
checkResult(result) {
if(!result) {
return true;
}
<|fim▁hole|> // check if result is ignored
if(this.checkResult(result)) {
return callback();
}
return callback(null, result);
}
apply(compiler) {
compiler.plugin("normal-module-factory", (nmf) => {
nmf.plugin("before-resolve", this.checkIgnore);
});
compiler.plugin("context-module-factory", (cmf) => {
cmf.plugin("before-resolve", this.checkIgnore);
});
}
}
module.exports = IgnorePlugin;<|fim▁end|> | return this.checkResource(result.request) && this.checkContext(result.context);
}
checkIgnore(result, callback) {
|
<|file_name|>butterfaces-clientSideFilter.jquery.js<|end_file_name|><|fim▁begin|>/**
* butterItemFilterField is a jQuery plugin that filters html element with the css class <code>filterable-item</code>.
* It is applied to the search field.<br/>
* If no filter text is entered, then all filterable-items are displayed. Else the search field value is matched against <b>all</b> text contained by a filterable-item.
*
* How to use:
* jQuery("#someInputSelector").butterItemFilterField();
*
* Author: Yann Massard
*/
(function ($) {
var delay = (function () {
var timer = 0;
return function (callback, ms) {
clearTimeout(timer);
timer = setTimeout(callback, ms);
};
})();
// extend jQuery --------------------------------------------------------------------
$.fn.butterItemFilterField = function (filterableItemContainerSelector) {
return this.each(function () {
var $this = $(this);
$this.keyup(function () {
delay(function () {
var filterValue = $this.val();
// find container again every time, because it could have been rerendered.
var $filterableItemContainer;
if (filterableItemContainerSelector) {
$filterableItemContainer = $(filterableItemContainerSelector);
} else {
var containerSelector = $this.attr('data-filterable-item-container');
$filterableItemContainer = $(containerSelector);
}
var filterableItems = $filterableItemContainer.find('.filterable-item');
filterableItems.each(function (i, elem) {
var $filterableItem = $(elem);
if ($filterableItem.is(':containsIgnoreCase(' + filterValue + ')')) {
$filterableItem.removeAttr("hidden");
$filterableItem.highlight(filterValue);
} else {
$filterableItem.attr("hidden", "hidden");
}
});<|fim▁hole|> }, 300);
});
});
};
}(jQuery));
(function ($) {
$.expr[":"].containsIgnoreCase = $.expr.createPseudo(function (arg) {
return function (elem) {
return !arg || $(elem).text().toUpperCase().indexOf(arg.toUpperCase()) >= 0;
};
});
}(jQuery));<|fim▁end|> | |
<|file_name|>pod.go<|end_file_name|><|fim▁begin|>package test
import (
"testing"
"k8s.io/kubernetes/pkg/admission"
kapi "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/unversioned"
"k8s.io/kubernetes/pkg/runtime"
buildapi "github.com/openshift/origin/pkg/build/api"
)
type TestPod kapi.Pod
func Pod() *TestPod {<|fim▁hole|>func (p *TestPod) WithAnnotation(name, value string) *TestPod {
if p.Annotations == nil {
p.Annotations = map[string]string{}
}
p.Annotations[name] = value
return p
}
func (p *TestPod) WithEnvVar(name, value string) *TestPod {
if len(p.Spec.Containers) == 0 {
p.Spec.Containers = append(p.Spec.Containers, kapi.Container{})
}
p.Spec.Containers[0].Env = append(p.Spec.Containers[0].Env, kapi.EnvVar{Name: name, Value: value})
return p
}
func (p *TestPod) WithBuild(t *testing.T, build *buildapi.Build, version string) *TestPod {
gv, err := unversioned.ParseGroupVersion(version)
if err != nil {
t.Fatalf("%v", err)
}
encodedBuild, err := runtime.Encode(kapi.Codecs.LegacyCodec(gv), build)
if err != nil {
t.Fatalf("%v", err)
}
return p.WithAnnotation(buildapi.BuildAnnotation, build.Name).WithEnvVar("BUILD", string(encodedBuild))
}
func (p *TestPod) EnvValue(name string) string {
if len(p.Spec.Containers) == 0 {
return ""
}
for _, ev := range p.Spec.Containers[0].Env {
if ev.Name == name {
return ev.Value
}
}
return ""
}
func (p *TestPod) GetBuild(t *testing.T) *buildapi.Build {
obj, err := runtime.Decode(kapi.Codecs.UniversalDecoder(), []byte(p.EnvValue("BUILD")))
if err != nil {
t.Fatalf("Could not decode build: %v", err)
}
build, ok := obj.(*buildapi.Build)
if !ok {
t.Fatalf("Not a build object: %#v", obj)
}
return build
}
func (p *TestPod) ToAttributes() admission.Attributes {
return admission.NewAttributesRecord((*kapi.Pod)(p),
kapi.Kind("Pod"),
"default",
"TestPod",
kapi.Resource("pods"),
"",
admission.Create,
nil)
}
func (p *TestPod) AsPod() *kapi.Pod {
return (*kapi.Pod)(p)
}<|fim▁end|> | return (*TestPod)(&kapi.Pod{})
}
|
<|file_name|>test_contrib_operator.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
from __future__ import print_function
import numpy as np
import mxnet as mx
import random
import itertools
from numpy.testing import assert_allclose, assert_array_equal
from mxnet.test_utils import *
from common import with_seed
import unittest
def test_box_nms_op():
def test_box_nms_forward(data, expected, thresh=0.5, valid=0, topk=-1, coord=2, score=1, cid=0, bid=-1,
force=False, in_format='corner', out_format='corner'):
for dtype in ['float16', 'float32', 'float64']:
data = mx.nd.array(data, dtype=dtype)
out = mx.contrib.nd.box_nms(data, overlap_thresh=thresh, valid_thresh=valid, topk=topk,
coord_start=coord, score_index=score, id_index=cid, background_id=bid,
force_suppress=force, in_format=in_format, out_format=out_format)
assert_almost_equal(out.asnumpy(), expected.astype(dtype), rtol=1e-3, atol=1e-3)
def test_box_nms_backward(data, grad, expected, thresh=0.5, valid=0, topk=-1, coord=2, score=1,
cid=0, bid=-1, force=False, in_format='corner', out_format='corner'):
in_var = mx.sym.Variable('data')
arr_data = mx.nd.array(data)
arr_grad = mx.nd.empty(arr_data.shape)
op = mx.contrib.sym.box_nms(in_var, overlap_thresh=thresh, valid_thresh=valid, topk=topk,
coord_start=coord, score_index=score, id_index=cid, background_id=bid,
force_suppress=force, in_format=in_format, out_format=out_format)
exe = op.bind(ctx=default_context(), args=[arr_data], args_grad=[arr_grad])
exe.forward(is_train=True)
exe.backward(mx.nd.array(grad))
assert_almost_equal(arr_grad.asnumpy(), expected)
def corner_to_center(data):
out = np.reshape(data, (-1, 6)).copy()
out[:, 2] = (data[:, 2] + data[:, 4]) / 2.0
out[:, 3] = (data[:, 3] + data[:, 5]) / 2.0
out[:, 4] = data[:, 4] - data[:, 2]
out[:, 5] = data[:, 5] - data[:, 3]
invalid = np.where(data[:, 0] < 0)[0]
out[invalid, :] = -1
return out
def center_to_corner(data):
data = np.reshape(data, (-1, 6)).copy()
out[:, 2] = data[:, 2] - data[:, 4] / 2.0
out[:, 3] = data[:, 3] - data[:, 5] / 2.0
out[:, 4] = data[:, 2] + data[:, 4] / 2.0
out[:, 5] = data[:, 3] + data[:, 5] / 2.0
invalid = np.where(data[:, 0] < 0)[0]
out[invalid, :] = -1
return out
def swap_position(data, expected, coord=2, score=1, cid=0, new_col=0):
data = np.reshape(data, (-1, 6))
expected = np.reshape(expected, (-1, 6))
new_coord = random.randint(0, 6 + new_col - 4)
others = list(range(new_coord)) + list(range(new_coord + 4, 6 + new_col))
random.shuffle(others)
new_score = others[0]
new_cid = others[1]
new_data = np.full((data.shape[0], data.shape[1] + new_col), -1.0)
new_expected = np.full((expected.shape[0], expected.shape[1] + new_col), -1.0)
new_data[:, new_coord:new_coord+4] = data[:, coord:coord+4]
new_data[:, new_score] = data[:, score]
new_data[:, new_cid] = data[:, cid]
new_expected[:, new_coord:new_coord+4] = expected[:, coord:coord+4]
new_expected[:, new_score] = expected[:, score]
new_expected[:, new_cid] = expected[:, cid]
return new_data, new_expected, new_coord, new_score, new_cid
# manually set up test cases
boxes = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
# case1
force = True
thresh = 0.5
expected = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, -1, -1, -1, -1, -1]]
grad = np.random.rand(4, 6)
expected_in_grad = grad[(1, 3, 2, 0), :]
expected_in_grad[1, :] = 0
test_box_nms_forward(np.array(boxes), np.array(expected), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes), grad, expected_in_grad, force=force, thresh=thresh)
# case2: multi batch
boxes2 = [boxes] * 3
expected2 = [expected] * 3
grad2 = np.array([grad.tolist()] * 3)
expected_in_grad2 = np.array([expected_in_grad.tolist()] * 3)
test_box_nms_forward(np.array(boxes2), np.array(expected2), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2, force=force, thresh=thresh)
# another new dim
boxes2 = [boxes2] * 2
expected2 = [expected2] * 2
grad2 = np.array([grad2.tolist()] * 2)
expected_in_grad2 = np.array([expected_in_grad2.tolist()] * 2)
test_box_nms_forward(np.array(boxes2), np.array(expected2), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2, force=force, thresh=thresh)
# case3: thresh
thresh = 0.1
boxes3 = boxes
expected3 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
grad3 = np.random.rand(4, 6)
expected_in_grad3 = grad3[(1, 3, 2, 0), :]
expected_in_grad3[(1, 2), :] = 0
test_box_nms_forward(np.array(boxes3), np.array(expected3), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes3), grad3, expected_in_grad3, force=force, thresh=thresh)
# case4: non-force
boxes4 = boxes
force = False
expected4 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[1, 0.4, 0.1, 0.1, 0.2, 0.2], [-1, -1, -1, -1, -1, -1]]
grad4 = np.random.rand(4, 6)
expected_in_grad4 = grad4[(1, 2, 3, 0), :]
expected_in_grad4[2, :] = 0
test_box_nms_forward(np.array(boxes4), np.array(expected4), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes4), grad4, expected_in_grad4, force=force, thresh=thresh)
# case5: different coding
boxes5 = corner_to_center(np.array(boxes4))
test_box_nms_forward(np.array(boxes5), np.array(expected4), force=force, thresh=thresh,
in_format='center')
expected5 = corner_to_center(np.array(expected4))
test_box_nms_forward(np.array(boxes4), np.array(expected5), force=force, thresh=thresh,
out_format='center')
test_box_nms_forward(np.array(boxes5), np.array(expected5), force=force, thresh=thresh,
in_format='center', out_format='center')
# case6: different position
boxes6, expected6, new_coord, new_score, new_id = swap_position(np.array(boxes4),
np.array(expected4), new_col=2)
test_box_nms_forward(np.array(boxes6), np.array(expected6), force=force, thresh=thresh,
coord=new_coord, score=new_score, cid=new_id)
# case7: no id, should be same with force=True
force = False
thresh = 0.5
test_box_nms_forward(np.array(boxes), np.array(expected), force=force, thresh=thresh, cid=-1)
# case8: multi-batch thresh + topk
boxes8 = [[[1, 1, 0, 0, 10, 10], [1, 0.4, 0, 0, 10, 10], [1, 0.3, 0, 0, 10, 10]],
[[2, 1, 0, 0, 10, 10], [2, 0.4, 0, 0, 10, 10], [2, 0.3, 0, 0, 10, 10]],
[[3, 1, 0, 0, 10, 10], [3, 0.4, 0, 0, 10, 10], [3, 0.3, 0, 0, 10, 10]]]
expected8 = [[[1, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]],
[[2, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]],
[[3, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]]
grad8 = np.random.rand(3, 3, 6)
expected_in_grad8 = np.zeros((3, 3, 6))
expected_in_grad8[(0, 1, 2), (0, 0, 0), :] = grad8[(0, 1, 2), (0, 0, 0), :]
force = False
thresh = 0.5
valid = 0.5
topk = 2
test_box_nms_forward(np.array(boxes8), np.array(expected8), force=force, thresh=thresh, valid=valid, topk=topk)
test_box_nms_backward(np.array(boxes8), grad8, expected_in_grad8, force=force, thresh=thresh, valid=valid, topk=topk)
# case9: background id filter out
# default background id -1
boxes9 = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [0, 0.4, 0.1, 0.1, 0.2, 0.2],
[1, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, 0.6, 0.5, 0.5, 0.7, 0.8]]
expected9 = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.3, 0.1, 0.1, 0.14, 0.14],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
force = True
thresh = 0.5
grad9 = np.random.rand(4, 6)
expected_in_grad9 = grad9[(0, 2, 1, 3), :]
expected_in_grad9[(1, 3), :] = 0
test_box_nms_forward(np.array(boxes9), np.array(expected9), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes9), grad9, expected_in_grad9, force=force, thresh=thresh)
# set background id
background_id = 0
expected9 = [[-1, 0.6, 0.5, 0.5, 0.7, 0.8], [1, 0.3, 0.1, 0.1, 0.14, 0.14],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
grad9 = np.random.rand(4, 6)
expected_in_grad9 = grad9[(2, 3, 1, 0), :]
expected_in_grad9[(0, 1), :] = 0
test_box_nms_forward(np.array(boxes9), np.array(expected9), force=force, thresh=thresh, bid=background_id)
test_box_nms_backward(np.array(boxes9), grad9, expected_in_grad9, force=force, thresh=thresh, bid=background_id)
def test_box_iou_op():
def numpy_box_iou(a, b, fmt='corner'):
def area(left, top, right, bottom):
return np.maximum(0, right - left) * np.maximum(0, bottom - top)
assert a.shape[-1] == 4
assert b.shape[-1] == 4
oshape = a.shape[:-1] + b.shape[:-1]
a = a.reshape((-1, 4))
ashape = a.shape
b = b.reshape((-1, 4))
a = np.tile(a, reps=[1, b.shape[0]]).reshape((-1, 4))
b = np.tile(b, reps=[ashape[0], 1]).reshape((-1, 4))
if fmt == 'corner':
al, at, ar, ab = np.split(a, 4, axis=-1)
bl, bt, br, bb = np.split(b, 4, axis=-1)
elif fmt == 'center':
ax, ay, aw, ah = np.split(a, 4, axis=-1)
bx, by, bw, bh = np.split(b, 4, axis=-1)
al, at, ar, ab = ax - aw / 2, ay - ah / 2, ax + aw / 2, ay + ah / 2
bl, bt, br, bb = bx - bw / 2, by - bh / 2, bx + bw / 2, by + bh / 2
else:
raise NotImplementedError("Fmt {} not supported".format(fmt))
width = np.maximum(0, np.minimum(ar, br) - np.maximum(al, bl))
height = np.maximum(0, np.minimum(ab, bb) - np.maximum(at, bt))
intersect = width * height
union = area(al, at, ar, ab) + area(bl, bt, br, bb) - intersect
union[np.where(intersect <= 0)] = 1e-12
iou = intersect / union
return iou.reshape(oshape)
def generate_boxes(dims):
s1, off1, s2, off2 = np.random.rand(4) * 100
xy = np.random.rand(*(dims + [2])) * s1 + off1
wh = np.random.rand(*(dims + [2])) * s2 + off2
xywh = np.concatenate([xy, wh], axis=-1)
ltrb = np.concatenate([xy - wh / 2, xy + wh / 2], axis=-1)
return xywh, ltrb
for ndima in range(1, 6):
for ndimb in range(1, 6):
dims_a = np.random.randint(low=1, high=3, size=ndima).tolist()
dims_b = np.random.randint(low=1, high=3, size=ndimb).tolist()
# generate left, top, right, bottom
xywh_a, ltrb_a = generate_boxes(dims_a)
xywh_b, ltrb_b = generate_boxes(dims_b)
iou_np = numpy_box_iou(ltrb_a, ltrb_b, fmt='corner')
iou_np2 = numpy_box_iou(xywh_a, xywh_b, fmt='center')
iou_mx = mx.nd.contrib.box_iou(mx.nd.array(ltrb_a), mx.nd.array(ltrb_b), format='corner')
iou_mx2 = mx.nd.contrib.box_iou(mx.nd.array(xywh_a), mx.nd.array(xywh_b), format='center')
assert_allclose(iou_np, iou_np2, rtol=1e-5, atol=1e-5)
assert_allclose(iou_np, iou_mx.asnumpy(), rtol=1e-5, atol=1e-5)
assert_allclose(iou_np, iou_mx2.asnumpy(), rtol=1e-5, atol=1e-5)
def test_bipartite_matching_op():
def assert_match(inputs, x, y, threshold, is_ascend=False):
for dtype in ['float16', 'float32', 'float64']:
inputs = mx.nd.array(inputs, dtype=dtype)
x = np.array(x, dtype=dtype)
y = np.array(y, dtype=dtype)
a, b = mx.nd.contrib.bipartite_matching(inputs, threshold=threshold, is_ascend=is_ascend)
assert_array_equal(a.asnumpy().astype('int64'), x.astype('int64'))
assert_array_equal(b.asnumpy().astype('int64'), y.astype('int64'))
assert_match([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]], [1, -1, 0], [2, 0], 1e-12, False)
assert_match([[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]], [-1, 0, 1], [1, 2], 100, True)
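    # Reading sketch (illustrative): in the descending case above, the best
    # score 0.6 pairs row 0 with column 1 and 0.3 pairs row 2 with column 0,
    # so row 1 stays unmatched (-1); the second call sorts ascending instead.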
def test_multibox_target_op():
anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]], ctx=default_context()).reshape((1, -1, 4))
cls_pred = mx.nd.array(list(range(10)), ctx=default_context()).reshape((1, -1, 2))
label = mx.nd.array([1, 0.1, 0.1, 0.5, 0.6], ctx=default_context()).reshape((1, -1, 5))
loc_target, loc_mask, cls_target = \
mx.nd.contrib.MultiBoxTarget(anchors, label, cls_pred,
overlap_threshold=0.5,
negative_mining_ratio=3,
negative_mining_thresh=0.4)
expected_loc_target = np.array([[5.0, 2.5000005, 3.4657357, 4.581454, 0., 0., 0., 0.]])
expected_loc_mask = np.array([[1, 1, 1, 1, 0, 0, 0, 0]])
expected_cls_target = np.array([[2, 0]])
assert_allclose(loc_target.asnumpy(), expected_loc_target, rtol=1e-5, atol=1e-5)
assert_array_equal(loc_mask.asnumpy(), expected_loc_mask)
assert_array_equal(cls_target.asnumpy(), expected_cls_target)
def test_gradient_multiplier_op():
# We use the quadratic function in combination with gradient multiplier
def f(x, a, b, c):
return a * x**2 + b * x + c
a = np.random.random_sample()
b = np.random.random_sample()
c = np.random.random_sample()
m = np.random.random_sample() - 0.5
data = mx.symbol.Variable('data')
quad_sym = mx.sym.contrib.quadratic(data=data, a=a, b=b, c=c)
gr_q_sym = mx.sym.contrib.gradientmultiplier(quad_sym, scalar=m)
for dtype in [np.float16, np.float32, np.float64]:
for ndim in range(1, 6):
shape = rand_shape_nd(ndim, 5)
data_np = np.random.randn(*shape).astype(dtype)
expected = f(data_np, a, b, c)
backward_expected = (2 * a * data_np + b) * m
# check imperative forward
output = mx.nd.contrib.quadratic(mx.nd.array(data_np), a=a, b=b, c=c)
output = mx.nd.contrib.gradientmultiplier(output, scalar=m)
assert_almost_equal(output.asnumpy(), expected,
rtol=1e-2 if dtype is np.float16 else 1e-5,
atol=1e-2 if dtype is np.float16 else 1e-5)
# check forward
check_symbolic_forward(gr_q_sym, [data_np], [expected],
rtol=1e-2 if dtype is np.float16 else 1e-5,
atol=1e-2 if dtype is np.float16 else 1e-5)
# check backward
check_symbolic_backward(gr_q_sym, [data_np], [np.ones(expected.shape)],
[backward_expected],
rtol=1e-2 if dtype is np.float16 else 1e-5,
atol=1e-2 if dtype is np.float16 else 1e-5)
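# A minimal imperative sketch (added for illustration; this helper is not part
# of the original test suite) of the behaviour checked above: the op is the
# identity in the forward pass and scales the incoming gradient by `scalar`
# in the backward pass.
def _gradient_multiplier_sketch():
    x = mx.nd.array([1.0, 2.0])
    x.attach_grad()
    with mx.autograd.record():
        y = mx.nd.contrib.gradientmultiplier(x, scalar=0.5)
    y.backward()
    # x.grad is now [0.5, 0.5]; an unscaled identity would give [1.0, 1.0].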
def test_multibox_prior_op():
h = 561
w = 728
X = mx.nd.random.uniform(shape=(1, 3, h, w))
Y = mx.contrib.nd.MultiBoxPrior(X, sizes=[0.75, 0.5, 0.25], ratios=[1, 2, 0.5])
assert_array_equal(Y.shape, np.array((1, 2042040, 4)))
boxes = Y.reshape((h, w, 5, 4))
assert_allclose(boxes.asnumpy()[250, 250, 0, :], np.array([0.055117, 0.071524, 0.63307 , 0.821524]), atol=1e-5, rtol=1e-5)
# relax first ratio if user insists
Y = mx.contrib.nd.MultiBoxPrior(X, sizes=[0.75, 0.5, 0.25], ratios=[20, 2, 0.5])
boxes = Y.reshape((h, w, 5, 4))
assert_allclose(boxes.asnumpy()[250, 250, 0, :], np.array([-0.948249, 0.362671, 1.636436, 0.530377]), atol=1e-5, rtol=1e-5)
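    # Sanity arithmetic for the assertions above (illustrative): with 3 sizes
    # and 3 ratios, MultiBoxPrior lays out len(sizes) + len(ratios) - 1 = 5
    # anchors per pixel, and 561 * 728 * 5 = 2042040 matches the asserted
    # output shape (1, 2042040, 4).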
def test_box_encode_op():
anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
refs = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
samples = mx.nd.array([[0, 1]])
matches = mx.nd.array([[0, 1]])
means = mx.nd.array([0.0, 0.0, 0.0, 0.0])
stds = mx.nd.array([0.1, 0.1, 0.2, 0.2])
Y, mask = mx.nd.contrib.box_encode(samples, matches, anchors, refs, means, stds)
assert_allclose(Y.asnumpy(), np.zeros((1, 2, 4)), atol=1e-5, rtol=1e-5)
assert_allclose(mask.asnumpy(), np.array([[[0., 0., 0., 0.], [1., 1., 1., 1.]]]), atol=1e-5, rtol=1e-5)
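    # Why these expected values hold (illustrative note): the anchors equal
    # the reference boxes and the means are zero, so every encoded offset is
    # 0; `samples` marks only the second box as a positive match, so only its
    # row of the mask is set to ones.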
def test_box_decode_op():
data = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
anchors = mx.nd.array([[0.1, 0.2, 0.3, 0.4], [0.5, 0.6, 0.7, 0.8]]).reshape((1, -1, 4))
Y = mx.nd.contrib.box_decode(data, anchors, .1, .1, .2, .2)
assert_allclose(Y.asnumpy(), np.array([[[-0.0562755, -0.00865743, 0.26227552, 0.42465743], \<|fim▁hole|>def test_op_mrcnn_mask_target():
if default_context().device_type != 'gpu':
return
num_rois = 2
num_classes = 4
mask_size = (3, 3)
ctx = mx.gpu(0)
# (B, N, 4)
rois = mx.nd.array([[[2.3, 4.3, 2.2, 3.3],
[3.5, 5.5, 0.9, 2.4]]], ctx=ctx)
gt_masks = mx.nd.arange(0, 4*32*32, ctx=ctx).reshape(1, 4, 32, 32)
# (B, N)
matches = mx.nd.array([[2, 0]], ctx=ctx)
# (B, N)
cls_targets = mx.nd.array([[2, 1]], ctx=ctx)
mask_targets, mask_cls = mx.nd.contrib.mrcnn_mask_target(rois, gt_masks, matches, cls_targets,
num_rois=num_rois,
num_classes=num_classes,
mask_size=mask_size)
# Ground truth outputs were generated with GluonCV's target generator
# gluoncv.model_zoo.mask_rcnn.MaskTargetGenerator(1, num_rois, num_classes, mask_size)
gt_mask_targets = mx.nd.array([[[[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]],
[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]],
[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]],
[[2193.4 , 2193.7332 , 2194.0667 ],
[2204.0667 , 2204.4 , 2204.7334 ],
[2214.7334 , 2215.0667 , 2215.4 ]]],
[[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]],
[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]],
[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]],
[[ 185. , 185.33334, 185.66667],
[ 195.66667, 196.00002, 196.33334],
[ 206.33333, 206.66666, 207. ]]]]])
gt_mask_cls = mx.nd.array([[0,0,1,0], [0,1,0,0]])
gt_mask_cls = gt_mask_cls.reshape(1,2,4,1,1).broadcast_axes(axis=(3,4), size=(3,3))
assert_almost_equal(mask_targets.asnumpy(), gt_mask_targets.asnumpy())
assert_almost_equal(mask_cls.asnumpy(), gt_mask_cls.asnumpy())
if __name__ == '__main__':
import nose
nose.runmodule()<|fim▁end|> | [0.13240421, 0.17859563, 0.93759584, 1.1174043 ]]]), atol=1e-5, rtol=1e-5)
@with_seed() |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import *
from apps.profile import views
urlpatterns = patterns('',
url(r'^get_preferences?/?', views.get_preference),
url(r'^set_preference/?', views.set_preference),
url(r'^set_account_settings/?', views.set_account_settings),
url(r'^get_view_setting/?', views.get_view_setting),
url(r'^set_view_setting/?', views.set_view_setting),
url(r'^set_collapsed_folders/?', views.set_collapsed_folders),
url(r'^paypal_form/?', views.paypal_form),
url(r'^paypal_return/?', views.paypal_return, name='paypal-return'),
url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'),
url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'),
url(r'^stripe_form/?', views.stripe_form, name='stripe-form'),
url(r'^activities/?', views.load_activities, name='profile-activities'),
url(r'^payment_history/?', views.payment_history, name='profile-payment-history'),
url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'),
url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'),
url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'),
url(r'^delete_account/?', views.delete_account, name='profile-delete-account'),
url(r'^forgot_password_return/?', views.forgot_password_return, name='profile-forgot-password-return'),
url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'),
url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'),
url(r'^email_optout/?', views.email_optout, name='profile-email-optout'),<|fim▁hole|><|fim▁end|> | ) |
<|file_name|>CorpDetatHelper.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package helper;
import bean.CorpDetat;
import java.util.List;
import javax.swing.JTable;
/**
*
* @author kamal
*/
public class CorpDetatHelper extends AbstractViewHelper<CorpDetat>{
public CorpDetatHelper(JTable jTable, List<CorpDetat> corpDetats) {
super(new String[]{ "Corp Detat"});
this.jTable = jTable;
this.list = corpDetats;
}
@Override
public Object getValueAt(int rowIndex, int columnIndex) {
if (rowIndex < list.size()) {
switch (columnIndex) {
case 0:
return list.get(rowIndex).getTitre();
                case 1:
                    // Unreachable in practice: only one column ("Corp Detat") is declared above.
                    return null;
default:
return null;
}
}
return null;
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>hidden_network_workaround.py<|end_file_name|><|fim▁begin|># My computer was failing to recognize wifi networks after being woken up from sleep so this uses the network manager command
# line tool to force my computer to recognize the network name I type into the terminal.<|fim▁hole|>
import subprocess
network_name = raw_input("What is the name of your network? ")
subprocess.check_call(['nmcli', 'c', 'up', 'id', network_name])<|fim▁end|> | |
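# A possible hardening of the call above (sketch only; assumes Python 3, where
# raw_input() would become input()):
#
#     try:
#         subprocess.run(['nmcli', 'c', 'up', 'id', network_name], check=True)
#     except subprocess.CalledProcessError as err:
#         print('nmcli failed with exit code', err.returncode)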
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | class URIError(Exception):
pass |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from sorl.thumbnail import ImageField
from warnings import warn
class Image(models.Model):
# link to other objects using the ContentType system
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
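    # Illustrative note (not part of the original file): HasImageMixin below
    # expects the related model to expose a reverse accessor named `images`,
    # e.g. a hypothetical model such as:
    #
    #     class Place(HasImageMixin, models.Model):
    #         name = models.CharField(max_length=100)
    #         images = generic.GenericRelation(Image)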
# store the actual image
image = ImageField( upload_to="images" )
# added
source = models.CharField(max_length=400)
# user
is_primary = models.BooleanField( default=False )
def save(self, *args, **kwargs):
"""
Only one image should be marked as is_primary for an object.
"""
<|fim▁hole|> content_type = self.content_type,
object_id = self.object_id,
)
# check that we are not first entry for content_object
if not siblings.count():
self.is_primary = True
super(Image, self).save(*args, **kwargs)
# If we are true then make sure all others are false
if self.is_primary is True:
primary_siblings = siblings.exclude( is_primary = False ).exclude( id = self.id )
for sibling in primary_siblings:
sibling.is_primary = False
sibling.save()
class HasImageMixin():
def primary_image(self):
primary = self.images.filter(is_primary=True)
if primary.count():
return primary[0].image
return None<|fim▁end|> | # other images for this object
siblings = Image.objects.filter( |
<|file_name|>f4m.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import base64
import io
import itertools
import os
import time
import xml.etree.ElementTree as etree
from .common import FileDownloader
from .http import HttpFD
from ..utils import (
struct_pack,
struct_unpack,
compat_urlparse,
format_bytes,
encodeFilename,
sanitize_open,
)
class FlvReader(io.BytesIO):
"""
Reader for Flv files
The file format is documented in https://www.adobe.com/devnet/f4v.html
"""
# Utility functions for reading numbers and strings
def read_unsigned_long_long(self):
return struct_unpack('!Q', self.read(8))[0]
def read_unsigned_int(self):
return struct_unpack('!I', self.read(4))[0]
def read_unsigned_char(self):
return struct_unpack('!B', self.read(1))[0]
def read_string(self):
res = b''
while True:
char = self.read(1)
if char == b'\x00':
break
res += char
return res
def read_box_info(self):
"""
Read a box and return the info as a tuple: (box_size, box_type, box_data)
"""
real_size = size = self.read_unsigned_int()
box_type = self.read(4)
header_end = 8
if size == 1:
real_size = self.read_unsigned_long_long()
header_end = 16
return real_size, box_type, self.read(real_size-header_end)
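    # Worked example (illustrative): a box whose 32-bit size field equals 1
    # stores its real length in a 64-bit field following the type, so the
    # header grows from 8 to 16 bytes and the payload is real_size - 16 bytes.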
def read_asrt(self):
# version
self.read_unsigned_char()
# flags
self.read(3)
quality_entry_count = self.read_unsigned_char()
# QualityEntryCount
for i in range(quality_entry_count):
self.read_string()
segment_run_count = self.read_unsigned_int()
segments = []
for i in range(segment_run_count):
first_segment = self.read_unsigned_int()
fragments_per_segment = self.read_unsigned_int()
segments.append((first_segment, fragments_per_segment))
return {
'segment_run': segments,
}
def read_afrt(self):
# version
self.read_unsigned_char()
# flags
self.read(3)
# time scale
self.read_unsigned_int()
quality_entry_count = self.read_unsigned_char()
# QualitySegmentUrlModifiers
for i in range(quality_entry_count):
self.read_string()
fragments_count = self.read_unsigned_int()
fragments = []
for i in range(fragments_count):
first = self.read_unsigned_int()
first_ts = self.read_unsigned_long_long()
duration = self.read_unsigned_int()
if duration == 0:
discontinuity_indicator = self.read_unsigned_char()
else:
discontinuity_indicator = None
fragments.append({
'first': first,
'ts': first_ts,
'duration': duration,
'discontinuity_indicator': discontinuity_indicator,
})
return {
'fragments': fragments,
}
def read_abst(self):
# version
self.read_unsigned_char()
# flags
self.read(3)
self.read_unsigned_int() # BootstrapinfoVersion
# Profile,Live,Update,Reserved
self.read(1)
# time scale
self.read_unsigned_int()
# CurrentMediaTime<|fim▁hole|> self.read_unsigned_long_long()
# SmpteTimeCodeOffset
self.read_unsigned_long_long()
self.read_string() # MovieIdentifier
server_count = self.read_unsigned_char()
# ServerEntryTable
for i in range(server_count):
self.read_string()
quality_count = self.read_unsigned_char()
# QualityEntryTable
for i in range(quality_count):
self.read_string()
# DrmData
self.read_string()
# MetaData
self.read_string()
segments_count = self.read_unsigned_char()
segments = []
for i in range(segments_count):
box_size, box_type, box_data = self.read_box_info()
assert box_type == b'asrt'
segment = FlvReader(box_data).read_asrt()
segments.append(segment)
fragments_run_count = self.read_unsigned_char()
fragments = []
for i in range(fragments_run_count):
box_size, box_type, box_data = self.read_box_info()
assert box_type == b'afrt'
fragments.append(FlvReader(box_data).read_afrt())
return {
'segments': segments,
'fragments': fragments,
}
def read_bootstrap_info(self):
total_size, box_type, box_data = self.read_box_info()
assert box_type == b'abst'
return FlvReader(box_data).read_abst()
def read_bootstrap_info(bootstrap_bytes):
return FlvReader(bootstrap_bytes).read_bootstrap_info()
def build_fragments_list(boot_info):
""" Return a list of (segment, fragment) for each fragment in the video """
res = []
segment_run_table = boot_info['segments'][0]
# I've only found videos with one segment
segment_run_entry = segment_run_table['segment_run'][0]
n_frags = segment_run_entry[1]
fragment_run_entry_table = boot_info['fragments'][0]['fragments']
first_frag_number = fragment_run_entry_table[0]['first']
for (i, frag_number) in zip(range(1, n_frags+1), itertools.count(first_frag_number)):
res.append((1, frag_number))
return res
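# Worked example (illustrative): with a single segment run entry of
# (first_segment=1, fragments_per_segment=3) and a first fragment number of 4,
# build_fragments_list returns [(1, 4), (1, 5), (1, 6)].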
def write_flv_header(stream, metadata):
"""Writes the FLV header and the metadata to stream"""
    # FLV header: signature 'FLV' + version 1
    stream.write(b'FLV\x01')
    # TypeFlags 0x05: both audio and video tags are present
    stream.write(b'\x05')
    # DataOffset: the FLV header is always 9 bytes long
    stream.write(b'\x00\x00\x00\x09')
    # FLV file body, starting with PreviousTagSize0 (always 0)
    stream.write(b'\x00\x00\x00\x00')
    # FLVTAG
    # Script data tag (type 0x12)
    stream.write(b'\x12')
    # Size of the metadata with 3 bytes
    stream.write(struct_pack('!L', len(metadata))[1:])
    # Timestamp (3 bytes), TimestampExtended (1 byte) and StreamID (3 bytes),
    # all zero for this tag
    stream.write(b'\x00\x00\x00\x00\x00\x00\x00')
stream.write(metadata)
# Magic numbers extracted from the output files produced by AdobeHDS.php
    # (https://github.com/K-S-V/Scripts)
stream.write(b'\x00\x00\x01\x73')
def _add_ns(prop):
return '{http://ns.adobe.com/f4m/1.0}%s' % prop
class HttpQuietDownloader(HttpFD):
def to_screen(self, *args, **kargs):
pass
class F4mFD(FileDownloader):
"""
A downloader for f4m manifests or AdobeHDS.
"""
def real_download(self, filename, info_dict):
man_url = info_dict['url']
self.to_screen('[download] Downloading f4m manifest')
manifest = self.ydl.urlopen(man_url).read()
self.report_destination(filename)
http_dl = HttpQuietDownloader(self.ydl,
{
'continuedl': True,
'quiet': True,
'noprogress': True,
'test': self.params.get('test', False),
})
doc = etree.fromstring(manifest)
formats = [(int(f.attrib.get('bitrate', -1)), f) for f in doc.findall(_add_ns('media'))]
formats = sorted(formats, key=lambda f: f[0])
rate, media = formats[-1]
base_url = compat_urlparse.urljoin(man_url, media.attrib['url'])
bootstrap = base64.b64decode(doc.find(_add_ns('bootstrapInfo')).text)
metadata = base64.b64decode(media.find(_add_ns('metadata')).text)
boot_info = read_bootstrap_info(bootstrap)
fragments_list = build_fragments_list(boot_info)
if self.params.get('test', False):
# We only download the first fragment
fragments_list = fragments_list[:1]
total_frags = len(fragments_list)
tmpfilename = self.temp_name(filename)
(dest_stream, tmpfilename) = sanitize_open(tmpfilename, 'wb')
write_flv_header(dest_stream, metadata)
# This dict stores the download progress, it's updated by the progress
# hook
state = {
'downloaded_bytes': 0,
'frag_counter': 0,
}
start = time.time()
def frag_progress_hook(status):
frag_total_bytes = status.get('total_bytes', 0)
estimated_size = (state['downloaded_bytes'] +
(total_frags - state['frag_counter']) * frag_total_bytes)
if status['status'] == 'finished':
state['downloaded_bytes'] += frag_total_bytes
state['frag_counter'] += 1
progress = self.calc_percent(state['frag_counter'], total_frags)
byte_counter = state['downloaded_bytes']
else:
frag_downloaded_bytes = status['downloaded_bytes']
byte_counter = state['downloaded_bytes'] + frag_downloaded_bytes
frag_progress = self.calc_percent(frag_downloaded_bytes,
frag_total_bytes)
progress = self.calc_percent(state['frag_counter'], total_frags)
progress += frag_progress / float(total_frags)
eta = self.calc_eta(start, time.time(), estimated_size, byte_counter)
self.report_progress(progress, format_bytes(estimated_size),
status.get('speed'), eta)
http_dl.add_progress_hook(frag_progress_hook)
frags_filenames = []
for (seg_i, frag_i) in fragments_list:
name = 'Seg%d-Frag%d' % (seg_i, frag_i)
url = base_url + name
frag_filename = '%s-%s' % (tmpfilename, name)
success = http_dl.download(frag_filename, {'url': url})
if not success:
return False
with open(frag_filename, 'rb') as down:
down_data = down.read()
reader = FlvReader(down_data)
while True:
_, box_type, box_data = reader.read_box_info()
if box_type == b'mdat':
dest_stream.write(box_data)
break
frags_filenames.append(frag_filename)
dest_stream.close()
self.report_finish(format_bytes(state['downloaded_bytes']), time.time() - start)
self.try_rename(tmpfilename, filename)
for frag_file in frags_filenames:
os.remove(frag_file)
fsize = os.path.getsize(encodeFilename(filename))
self._hook_progress({
'downloaded_bytes': fsize,
'total_bytes': fsize,
'filename': filename,
'status': 'finished',
})
return True<|fim▁end|> | |
<|file_name|>plot_self_training_varying_threshold.py<|end_file_name|><|fim▁begin|>"""
=============================================
Effect of varying threshold for self-training
=============================================
This example illustrates the effect of a varying threshold on self-training.
The `breast_cancer` dataset is loaded, and labels are deleted such that only 50
out of 569 samples have labels. A `SelfTrainingClassifier` is fitted on this
dataset, with varying thresholds.
The upper graph shows the amount of labeled samples that the classifier has
available by the end of fit, and the accuracy of the classifier. The lower
graph shows the last iteration in which a sample was labeled. All values are
cross validated with 3 folds.
At low thresholds (in [0.4, 0.5]), the classifier learns from samples that were
labeled with a low confidence. These low-confidence samples are likely to have
incorrect predicted labels, and as a result, fitting on these incorrect labels
produces a poor accuracy. Note that the classifier labels almost all of the
samples, and only takes one iteration.
For very high thresholds (in [0.9, 1)) we observe that the classifier does not
augment its dataset (the amount of self-labeled samples is 0). As a result, the
accuracy achieved with a threshold of 0.9999 is the same as a normal supervised
classifier would achieve.
The optimal accuracy lies in between both of these extremes at a threshold of
around 0.7.
"""
# Authors: Oliver Rausch <[email protected]>
# License: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.semi_supervised import SelfTrainingClassifier
from sklearn.metrics import accuracy_score
from sklearn.utils import shuffle
n_splits = 3
X, y = datasets.load_breast_cancer(return_X_y=True)
X, y = shuffle(X, y, random_state=42)
y_true = y.copy()
y[50:] = -1
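# scikit-learn's semi-supervised API treats -1 as the "unlabeled" marker, so
# every sample past the first 50 is now unlabeled.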
total_samples = y.shape[0]
base_classifier = SVC(probability=True, gamma=0.001, random_state=42)
x_values = np.arange(0.4, 1.05, 0.05)
x_values = np.append(x_values, 0.99999)
scores = np.empty((x_values.shape[0], n_splits))
amount_labeled = np.empty((x_values.shape[0], n_splits))
amount_iterations = np.empty((x_values.shape[0], n_splits))
for (i, threshold) in enumerate(x_values):
self_training_clf = SelfTrainingClassifier(base_classifier, threshold=threshold)
# We need manual cross validation so that we don't treat -1 as a separate
# class when computing accuracy
skfolds = StratifiedKFold(n_splits=n_splits)
for fold, (train_index, test_index) in enumerate(skfolds.split(X, y)):
X_train = X[train_index]
y_train = y[train_index]
X_test = X[test_index]
y_test = y[test_index]
y_test_true = y_true[test_index]
self_training_clf.fit(X_train, y_train)
        # The amount of labeled samples at the end of fitting
amount_labeled[i, fold] = (
total_samples
- np.unique(self_training_clf.labeled_iter_, return_counts=True)[1][0]
)
# The last iteration the classifier labeled a sample in
amount_iterations[i, fold] = np.max(self_training_clf.labeled_iter_)
y_pred = self_training_clf.predict(X_test)
scores[i, fold] = accuracy_score(y_test_true, y_pred)
ax1 = plt.subplot(211)
ax1.errorbar(
x_values, scores.mean(axis=1), yerr=scores.std(axis=1), capsize=2, color="b"
)
ax1.set_ylabel("Accuracy", color="b")
ax1.tick_params("y", colors="b")
ax2 = ax1.twinx()
ax2.errorbar(
x_values,
amount_labeled.mean(axis=1),
yerr=amount_labeled.std(axis=1),
capsize=2,
color="g",
)
ax2.set_ylim(bottom=0)
ax2.set_ylabel("Amount of labeled samples", color="g")
ax2.tick_params("y", colors="g")<|fim▁hole|>ax3.errorbar(
x_values,
amount_iterations.mean(axis=1),
yerr=amount_iterations.std(axis=1),
capsize=2,
color="b",
)
ax3.set_ylim(bottom=0)
ax3.set_ylabel("Amount of iterations")
ax3.set_xlabel("Threshold")
plt.show()<|fim▁end|> |
ax3 = plt.subplot(212, sharex=ax1) |
<|file_name|>router.js<|end_file_name|><|fim▁begin|>/*
YUI 3.16.0 (build 76f0e08)
Copyright 2014 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('router', function (Y, NAME) {
/**
Provides URL-based routing using HTML5 `pushState()` or the location hash.
@module app
@submodule router
@since 3.4.0
**/
var HistoryHash = Y.HistoryHash,
QS = Y.QueryString,
YArray = Y.Array,
YLang = Y.Lang,
YObject = Y.Object,
win = Y.config.win,
// Holds all the active router instances. This supports the static
// `dispatch()` method which causes all routers to dispatch.
instances = [],
// We have to queue up pushState calls to avoid race conditions, since the
// popstate event doesn't actually provide any info on what URL it's
// associated with.
saveQueue = [],
/**
Fired when the router is ready to begin dispatching to route handlers.
You shouldn't need to wait for this event unless you plan to implement some
kind of custom dispatching logic. It's used internally in order to avoid
dispatching to an initial route if a browser history change occurs first.
@event ready
@param {Boolean} dispatched `true` if routes have already been dispatched
(most likely due to a history change).
@fireOnce
**/
EVT_READY = 'ready';
/**
Provides URL-based routing using HTML5 `pushState()` or the location hash.
This makes it easy to wire up route handlers for different application states
while providing full back/forward navigation support and bookmarkable, shareable
URLs.
@class Router
@param {Object} [config] Config properties.
@param {Boolean} [config.html5] Overrides the default capability detection
and forces this router to use (`true`) or not use (`false`) HTML5
history.
@param {String} [config.root=''] Root path from which all routes should be
evaluated.
@param {Array} [config.routes=[]] Array of route definition objects.
@constructor
@extends Base
@since 3.4.0
**/
function Router() {
Router.superclass.constructor.apply(this, arguments);
}
Y.Router = Y.extend(Router, Y.Base, {
// -- Protected Properties -------------------------------------------------
/**
Whether or not `_dispatch()` has been called since this router was
instantiated.
@property _dispatched
@type Boolean
@default undefined
@protected
**/
/**
Whether or not we're currently in the process of dispatching to routes.
@property _dispatching
@type Boolean
@default undefined
@protected
**/
/**
History event handle for the `history:change` or `hashchange` event
subscription.
@property _historyEvents
@type EventHandle
@protected
**/
/**
Cached copy of the `html5` attribute for internal use.
@property _html5
@type Boolean
@protected
**/
/**
Map which holds the registered param handlers in the form:
`name` -> RegExp | Function.
@property _params
@type Object
@protected
@since 3.12.0
**/
/**
Whether or not the `ready` event has fired yet.
@property _ready
@type Boolean
@default undefined
@protected
**/
/**
Regex used to break up a URL string around the URL's path.
Subpattern captures:
1. Origin, everything before the URL's path-part.
2. The URL's path-part.
3. The URL's query.
4. The URL's hash fragment.
@property _regexURL
@type RegExp
@protected
@since 3.5.0
**/
_regexURL: /^((?:[^\/#?:]+:\/\/|\/\/)[^\/]*)?([^?#]*)(\?[^#]*)?(#.*)?$/,
/**
Regex used to match parameter placeholders in route paths.
Subpattern captures:
1. Parameter prefix character. Either a `:` for subpath parameters that
should only match a single level of a path, or `*` for splat parameters
that should match any number of path levels.
2. Parameter name, if specified, otherwise it is a wildcard match.
@property _regexPathParam
@type RegExp
@protected
**/
_regexPathParam: /([:*])([\w\-]+)?/g,
/**
Regex that matches and captures the query portion of a URL, minus the
preceding `?` character, and discarding the hash portion of the URL if any.
@property _regexUrlQuery
@type RegExp
@protected
**/
_regexUrlQuery: /\?([^#]*).*$/,
/**
Regex that matches everything before the path portion of a URL (the origin).
This will be used to strip this part of the URL from a string when we
only want the path.
@property _regexUrlOrigin
@type RegExp
@protected
**/
_regexUrlOrigin: /^(?:[^\/#?:]+:\/\/|\/\/)[^\/]*/,
/**
Collection of registered routes.
@property _routes
@type Array
@protected
**/
// -- Lifecycle Methods ----------------------------------------------------
initializer: function (config) {
var self = this;
self._html5 = self.get('html5');
self._params = {};
self._routes = [];
self._url = self._getURL();
// Necessary because setters don't run on init.
self._setRoutes(config && config.routes ? config.routes :
self.get('routes'));
// Set up a history instance or hashchange listener.
if (self._html5) {
self._history = new Y.HistoryHTML5({force: true});
self._historyEvents =
Y.after('history:change', self._afterHistoryChange, self);
} else {
self._historyEvents =
Y.on('hashchange', self._afterHistoryChange, win, self);
}
// Fire a `ready` event once we're ready to route. We wait first for all
// subclass initializers to finish, then for window.onload, and then an
// additional 20ms to allow the browser to fire a useless initial
// `popstate` event if it wants to (and Chrome always wants to).
self.publish(EVT_READY, {
defaultFn : self._defReadyFn,
fireOnce : true,
preventable: false
});
self.once('initializedChange', function () {
Y.once('load', function () {
setTimeout(function () {
self.fire(EVT_READY, {dispatched: !!self._dispatched});
}, 20);
});
});
// Store this router in the collection of all active router instances.
instances.push(this);
},
destructor: function () {
var instanceIndex = YArray.indexOf(instances, this);
// Remove this router from the collection of active router instances.
if (instanceIndex > -1) {
instances.splice(instanceIndex, 1);
}
if (this._historyEvents) {
this._historyEvents.detach();
}
},
// -- Public Methods -------------------------------------------------------
/**
Dispatches to the first route handler that matches the current URL, if any.
If `dispatch()` is called before the `ready` event has fired, it will
automatically wait for the `ready` event before dispatching. Otherwise it
will dispatch immediately.
@method dispatch
@chainable
**/
dispatch: function () {
this.once(EVT_READY, function () {
var req, res;
this._ready = true;
if (!this.upgrade()) {
req = this._getRequest('dispatch');
res = this._getResponse(req);
this._dispatch(req, res);
}
});
return this;
},
/**
Gets the current route path.
@method getPath
@return {String} Current route path.
**/
getPath: function () {
return this._getPath();
},
/**
Returns `true` if this router has at least one route that matches the
specified URL, `false` otherwise. This also checks that any named `param`
    handlers also accept the param values in the `url`.
This method enforces the same-origin security constraint on the specified
`url`; any URL which is not from the same origin as the current URL will
always return `false`.
@method hasRoute
@param {String} url URL to match.
@return {Boolean} `true` if there's at least one matching route, `false`
otherwise.
**/
hasRoute: function (url) {
var path, routePath, routes;
if (!this._hasSameOrigin(url)) {
return false;
}
if (!this._html5) {
url = this._upgradeURL(url);
}
// Get just the path portion of the specified `url`. The `match()`
// method does some special checking that the `path` is within the root.
path = this.removeQuery(url.replace(this._regexUrlOrigin, ''));
routes = this.match(path);
if (!routes.length) {
return false;
}
routePath = this.removeRoot(path);
// Check that there's at least one route whose param handlers also
// accept all the param values.
return !!YArray.filter(routes, function (route) {
// Get the param values for the route and path to see whether the
// param handlers accept or reject the param values. Include any
// route whose named param handlers accept *all* param values. This
// will return `false` if a param handler rejects a param value.
return this._getParamValues(route, routePath);
}, this).length;
},
/**
Returns an array of route objects that match the specified URL path.
If this router has a `root`, then the specified `path` _must_ be
semantically within the `root` path to match any routes.
This method is called internally to determine which routes match the current
path whenever the URL changes. You may override it if you want to customize
the route matching logic, although this usually shouldn't be necessary.
Each returned route object has the following properties:
* `callback`: A function or a string representing the name of a function
      on this router that should be executed when the route is triggered.
* `keys`: An array of strings representing the named parameters defined in
the route's path specification, if any.
* `path`: The route's path specification, which may be either a string or
a regex.
* `regex`: A regular expression version of the route's path specification.
This regex is used to determine whether the route matches a given path.
@example
router.route('/foo', function () {});
router.match('/foo');
// => [{callback: ..., keys: [], path: '/foo', regex: ...}]
@method match
@param {String} path URL path to match. This should be an absolute path that
starts with a slash: "/".
@return {Object[]} Array of route objects that match the specified path.
**/
match: function (path) {
var root = this.get('root');
if (root) {
// The `path` must be semantically within this router's `root` path
// or mount point, if it's not then no routes should be considered a
// match.
if (!this._pathHasRoot(root, path)) {
return [];
}
// Remove this router's `root` from the `path` before checking the
// routes for any matches.
path = this.removeRoot(path);
}
return YArray.filter(this._routes, function (route) {
return path.search(route.regex) > -1;
});
},
/**
Adds a handler for a route param specified by _name_.
Param handlers can be registered via this method and are used to
validate/format values of named params in routes before dispatching to the
    route's handler functions. Using param handlers allows routes to be defined
using string paths which allows for `req.params` to use named params, but
still applying extra validation or formatting to the param values parsed
from the URL.
If a param handler regex or function returns a value of `false`, `null`,
`undefined`, or `NaN`, the current route will not match and be skipped. All
other return values will be used in place of the original param value parsed
from the URL.
@example
router.param('postId', function (value) {
return parseInt(value, 10);
});
router.param('username', /^\w+$/);
        router.route('/posts/:postId', function (req) {
            Y.log('Post: ' + req.params.postId);
        });
        router.route('/users/:username', function (req) {
            // `req.params.username` is an array because the result of calling
            // `exec()` on the regex is assigned as the param's value.
            Y.log('User: ' + req.params.username[0]);
        });
        router.route('*', function () {
            Y.log('Catch-all no routes matched!');
        });
// URLs which match routes:
router.save('/posts/1'); // => "Post: 1"
router.save('/users/ericf'); // => "User: ericf"
// URLs which do not match routes because params fail validation:
router.save('/posts/a'); // => "Catch-all no routes matched!"
router.save('/users/ericf,rgrove'); // => "Catch-all no routes matched!"
@method param
@param {String} name Name of the param used in route paths.
@param {Function|RegExp} handler Function to invoke or regular expression to
`exec()` during route dispatching whose return value is used as the new
param value. Values of `false`, `null`, `undefined`, or `NaN` will cause
the current route to not match and be skipped. When a function is
specified, it will be invoked in the context of this instance with the
following parameters:
@param {String} handler.value The current param value parsed from the URL.
@param {String} handler.name The name of the param.
@chainable
@since 3.12.0
**/
param: function (name, handler) {
this._params[name] = handler;
return this;
},
/**
Removes the `root` URL from the front of _url_ (if it's there) and returns
the result. The returned path will always have a leading `/`.
@method removeRoot
@param {String} url URL.
@return {String} Rootless path.
**/
removeRoot: function (url) {
var root = this.get('root'),
path;
// Strip out the non-path part of the URL, if any (e.g.
// "http://foo.com"), so that we're left with just the path.
url = url.replace(this._regexUrlOrigin, '');
// Return the host-less URL if there's no `root` path to further remove.
if (!root) {
return url;
}
path = this.removeQuery(url);
// Remove the `root` from the `url` if it's the same or its path is
// semantically within the root path.
if (path === root || this._pathHasRoot(root, path)) {
url = url.substring(root.length);
}
return url.charAt(0) === '/' ? url : '/' + url;
},
/**
Removes a query string from the end of the _url_ (if one exists) and returns
the result.
@method removeQuery
@param {String} url URL.
@return {String} Queryless path.
**/
removeQuery: function (url) {
return url.replace(/\?.*$/, '');
},
/**
Replaces the current browser history entry with a new one, and dispatches to
the first matching route handler, if any.
Behind the scenes, this method uses HTML5 `pushState()` in browsers that
support it (or the location hash in older browsers and IE) to change the
URL.
The specified URL must share the same origin (i.e., protocol, host, and
port) as the current page, or an error will occur.
@example
// Starting URL: http://example.com/
router.replace('/path/');
// New URL: http://example.com/path/
router.replace('/path?foo=bar');
// New URL: http://example.com/path?foo=bar
router.replace('/');
// New URL: http://example.com/
@method replace
@param {String} [url] URL to set. This URL needs to be of the same origin as
the current URL. This can be a URL relative to the router's `root`
attribute. If no URL is specified, the page's current URL will be used.
@chainable
@see save()
**/
replace: function (url) {
return this._queue(url, true);
},
/**
Adds a route handler for the specified `route`.
The `route` parameter may be a string or regular expression to represent a
URL path, or a route object. If it's a string (which is most common), it may
contain named parameters: `:param` will match any single part of a URL path
(not including `/` characters), and `*param` will match any number of parts
of a URL path (including `/` characters). These named parameters will be
made available as keys on the `req.params` object that's passed to route
handlers.
If the `route` parameter is a regex, all pattern matches will be made
available as numbered keys on `req.params`, starting with `0` for the full
match, then `1` for the first subpattern match, and so on.
Alternatively, an object can be provided to represent the route and it may
contain a `path` property which is a string or regular expression which
causes the route to be process as described above. If the route object
already contains a `regex` or `regexp` property, the route will be
    considered fully-processed and will be associated with any `callbacks`
specified on the object and those specified as parameters to this method.
**Note:** Any additional data contained on the route object will be
preserved.
Here's a set of sample routes along with URL paths that they match:
* Route: `/photos/:tag/:page`
* URL: `/photos/kittens/1`, params: `{tag: 'kittens', page: '1'}`
* URL: `/photos/puppies/2`, params: `{tag: 'puppies', page: '2'}`
* Route: `/file/*path`
* URL: `/file/foo/bar/baz.txt`, params: `{path: 'foo/bar/baz.txt'}`
* URL: `/file/foo`, params: `{path: 'foo'}`
**Middleware**: Routes also support an arbitrary number of callback
functions. This allows you to easily reuse parts of your route-handling code
with different route. This method is liberal in how it processes the
specified `callbacks`, you can specify them as separate arguments, or as
arrays, or both.
    If multiple routes match a given URL, they will be executed in the order they
were added. The first route that was added will be the first to be executed.
**Passing Control**: Invoking the `next()` function within a route callback
will pass control to the next callback function (if any) or route handler
(if any). If a value is passed to `next()`, it's assumed to be an error,
therefore stopping the dispatch chain, unless that value is: `"route"`,
which is special case and dispatching will skip to the next route handler.
This allows middleware to skip any remaining middleware for a particular
route.
@example
router.route('/photos/:tag/:page', function (req, res, next) {
});
// Using middleware.
router.findUser = function (req, res, next) {
req.user = this.get('users').findById(req.params.user);
next();
};
router.route('/users/:user', 'findUser', function (req, res, next) {
// The `findUser` middleware puts the `user` object on the `req`.
});
@method route
@param {String|RegExp|Object} route Route to match. May be a string or a
regular expression, or a route object.
@param {Array|Function|String} callbacks* Callback functions to call
whenever this route is triggered. These can be specified as separate
arguments, or in arrays, or both. If a callback is specified as a
string, the named function will be called on this router instance.
@param {Object} callbacks.req Request object containing information about
the request. It contains the following properties.
@param {Array|Object} callbacks.req.params Captured parameters matched
by the route path specification. If a string path was used and
contained named parameters, then this will be a key/value hash mapping
parameter names to their matched values. If a regex path was used,
this will be an array of subpattern matches starting at index 0 for
the full match, then 1 for the first subpattern match, and so on.
@param {String} callbacks.req.path The current URL path.
@param {Number} callbacks.req.pendingCallbacks Number of remaining
callbacks the route handler has after this one in the dispatch chain.
@param {Number} callbacks.req.pendingRoutes Number of matching routes
after this one in the dispatch chain.
@param {Object} callbacks.req.query Query hash representing the URL
query string, if any. Parameter names are keys, and are mapped to
parameter values.
@param {Object} callbacks.req.route Reference to the current route
object whose callbacks are being dispatched.
@param {Object} callbacks.req.router Reference to this router instance.
@param {String} callbacks.req.src What initiated the dispatch. In an
HTML5 browser, when the back/forward buttons are used, this property
        will have a value of "popstate". When the `dispatch()` method is
called, the `src` will be `"dispatch"`.
@param {String} callbacks.req.url The full URL.
@param {Object} callbacks.res Response object containing methods and
information that relate to responding to a request. It contains the
following properties.
@param {Object} callbacks.res.req Reference to the request object.
@param {Function} callbacks.next Function to pass control to the next
callback or the next matching route if no more callbacks (middleware)
exist for the current route handler. If you don't call this function,
then no further callbacks or route handlers will be executed, even if
there are more that match. If you do call this function, then the next
callback (if any) or matching route handler (if any) will be called.
All of these functions will receive the same `req` and `res` objects
that were passed to this route (so you can use these objects to pass
data along to subsequent callbacks and routes).
@param {String} [callbacks.next.err] Optional error which will stop the
dispatch chaining for this `req`, unless the value is `"route"`, which
is special cased to jump skip past any callbacks for the current route
and pass control the next route handler.
@chainable
**/
route: function (route, callbacks) {
// Grab callback functions from var-args.
callbacks = YArray(arguments, 1, true);
var keys, regex;
// Supports both the `route(path, callbacks)` and `route(config)` call
// signatures, allowing for fully-processed route configs to be passed.
if (typeof route === 'string' || YLang.isRegExp(route)) {
// Flatten `callbacks` into a single dimension array.
callbacks = YArray.flatten(callbacks);
keys = [];
regex = this._getRegex(route, keys);
route = {
callbacks: callbacks,
keys : keys,
path : route,
regex : regex
};
} else {
// Look for any configured `route.callbacks` and fallback to
// `route.callback` for back-compat, append var-arg `callbacks`,
// then flatten the entire collection to a single dimension array.
callbacks = YArray.flatten(
[route.callbacks || route.callback || []].concat(callbacks)
);
// Check for previously generated regex, also fallback to `regexp`
// for greater interop.
keys = route.keys;
regex = route.regex || route.regexp;
// Generates the route's regex if it doesn't already have one.
if (!regex) {
keys = [];
regex = this._getRegex(route.path, keys);
}
// Merge specified `route` config object with processed data.
route = Y.merge(route, {
callbacks: callbacks,
keys : keys,
path : route.path || regex,
regex : regex
});
}
this._routes.push(route);
return this;
},
/**
Saves a new browser history entry and dispatches to the first matching route
handler, if any.
Behind the scenes, this method uses HTML5 `pushState()` in browsers that
support it (or the location hash in older browsers and IE) to change the
URL and create a history entry.
The specified URL must share the same origin (i.e., protocol, host, and
port) as the current page, or an error will occur.
@example
// Starting URL: http://example.com/
router.save('/path/');
// New URL: http://example.com/path/
router.save('/path?foo=bar');
// New URL: http://example.com/path?foo=bar
router.save('/');
// New URL: http://example.com/
@method save
@param {String} [url] URL to set. This URL needs to be of the same origin as
the current URL. This can be a URL relative to the router's `root`
attribute. If no URL is specified, the page's current URL will be used.
@chainable
@see replace()
**/
save: function (url) {
return this._queue(url);
},
/**
Upgrades a hash-based URL to an HTML5 URL if necessary. In non-HTML5
browsers, this method is a noop.
@method upgrade
@return {Boolean} `true` if the URL was upgraded, `false` otherwise.
**/
upgrade: function () {
if (!this._html5) {
return false;
}
// Get the resolve hash path.
var hashPath = this._getHashPath();
if (hashPath) {
// This is an HTML5 browser and we have a hash-based path in the
// URL, so we need to upgrade the URL to a non-hash URL. This
// will trigger a `history:change` event, which will in turn
// trigger a dispatch.
this.once(EVT_READY, function () {
this.replace(hashPath);
});
return true;
}
return false;
},
// -- Protected Methods ----------------------------------------------------
/**
Wrapper around `decodeURIComponent` that also converts `+` chars into
spaces.
@method _decode
@param {String} string String to decode.
@return {String} Decoded string.
@protected
**/
_decode: function (string) {
return decodeURIComponent(string.replace(/\+/g, ' '));
},
/**
Shifts the topmost `_save()` call off the queue and executes it. Does
nothing if the queue is empty.
@method _dequeue
@chainable
@see _queue
@protected
**/
_dequeue: function () {
var self = this,
fn;
// If window.onload hasn't yet fired, wait until it has before
// dequeueing. This will ensure that we don't call pushState() before an
// initial popstate event has fired.
if (!YUI.Env.windowLoaded) {
Y.once('load', function () {
self._dequeue();
});
return this;
}
fn = saveQueue.shift();
return fn ? fn() : this;
},
/**
Dispatches to the first route handler that matches the specified _path_.
If called before the `ready` event has fired, the dispatch will be aborted.
This ensures normalized behavior between Chrome (which fires a `popstate`
event on every pageview) and other browsers (which do not).
@method _dispatch
@param {object} req Request object.
@param {String} res Response object.
@chainable
@protected
**/
_dispatch: function (req, res) {
var self = this,
routes = self.match(req.path),
callbacks = [],
routePath, paramValues;
self._dispatching = self._dispatched = true;
if (!routes || !routes.length) {
self._dispatching = false;
return self;
}
routePath = self.removeRoot(req.path);
function next(err) {
var callback, name, route;
if (err) {
// Special case "route" to skip to the next route handler
// avoiding any additional callbacks for the current route.
if (err === 'route') {
callbacks = [];
next();
} else {
Y.error(err);
}
} else if ((callback = callbacks.shift())) {
if (typeof callback === 'string') {
name = callback;
callback = self[name];
if (!callback) {
Y.error('Router: Callback not found: ' + name, null, 'router');
}
}
// Allow access to the number of remaining callbacks for the
// route.
req.pendingCallbacks = callbacks.length;
callback.call(self, req, res, next);
} else if ((route = routes.shift())) {
paramValues = self._getParamValues(route, routePath);
if (!paramValues) {
// Skip this route because one of the param handlers
// rejected a param value in the `routePath`.
next('route');
return;
}
// Expose the processed param values.
req.params = paramValues;
// Allow access to current route and the number of remaining
// routes for this request.
req.route = route;
req.pendingRoutes = routes.length;
// Make a copy of this route's `callbacks` so the original array
// is preserved.
callbacks = route.callbacks.concat();
// Execute this route's `callbacks`.
next();
}
}
next();
self._dispatching = false;
return self._dequeue();
},
/**
Returns the resolved path from the hash fragment, or an empty string if the
hash is not path-like.
@method _getHashPath
@param {String} [hash] Hash fragment to resolve into a path. By default this<|fim▁hole|> @protected
**/
_getHashPath: function (hash) {
hash || (hash = HistoryHash.getHash());
// Make sure the `hash` is path-like.
if (hash && hash.charAt(0) === '/') {
return this._joinURL(hash);
}
return '';
},
/**
Gets the location origin (i.e., protocol, host, and port) as a URL.
@example
http://example.com
@method _getOrigin
@return {String} Location origin (i.e., protocol, host, and port).
@protected
**/
_getOrigin: function () {
var location = Y.getLocation();
return location.origin || (location.protocol + '//' + location.host);
},
/**
Getter for the `params` attribute.
@method _getParams
@return {Object} Mapping of param handlers: `name` -> RegExp | Function.
@protected
@since 3.12.0
**/
_getParams: function () {
return Y.merge(this._params);
},
/**
Gets the param values for the specified `route` and `path`, suitable to use
form `req.params`.
**Note:** This method will return `false` if a named param handler rejects a
param value.
@method _getParamValues
@param {Object} route The route to get param values for.
@param {String} path The route path (root removed) that provides the param
values.
@return {Boolean|Array|Object} The collection of processed param values.
Either a hash of `name` -> `value` for named params processed by this
router's param handlers, or an array of matches for a route with unnamed
params. If a named param handler rejects a value, then `false` will be
returned.
@protected
@since 3.16.0
**/
_getParamValues: function (route, path) {
var matches, paramsMatch, paramValues;
// Decode each of the path params so that the any URL-encoded path
// segments are decoded in the `req.params` object.
matches = YArray.map(route.regex.exec(path) || [], function (match) {
// Decode matches, or coerce `undefined` matches to an empty
// string to match expectations of working with `req.params`
// in the context of route dispatching, and normalize
// browser differences in their handling of regex NPCGs:
// https://github.com/yui/yui3/issues/1076
return (match && this._decode(match)) || '';
}, this);
// Simply return the array of decoded values when the route does *not*
// use named parameters.
if (matches.length - 1 !== route.keys.length) {
return matches;
}
// Remove the first "match" from the param values, because it's just the
// `path` processed by the route's regex, and map the values to the keys
// to create the name params collection.
paramValues = YArray.hash(route.keys, matches.slice(1));
// Pass each named param value to its handler, if there is one, for
// validation/processing. If a param value is rejected by a handler,
// then the params don't match and a falsy value is returned.
paramsMatch = YArray.every(route.keys, function (name) {
var paramHandler = this._params[name],
value = paramValues[name];
if (paramHandler && value && typeof value === 'string') {
// Check if `paramHandler` is a RegExp, because this
// is true in Android 2.3 and other browsers!
// `typeof /.*/ === 'function'`
value = YLang.isRegExp(paramHandler) ?
paramHandler.exec(value) :
paramHandler.call(this, value, name);
if (value !== false && YLang.isValue(value)) {
// Update the named param to the value from the handler.
paramValues[name] = value;
return true;
}
// Consider the param value as rejected by the handler.
return false;
}
return true;
}, this);
if (paramsMatch) {
return paramValues;
}
// Signal that a param value was rejected by a named param handler.
return false;
},
/**
Gets the current route path.
@method _getPath
@return {String} Current route path.
@protected
**/
_getPath: function () {
var path = (!this._html5 && this._getHashPath()) ||
Y.getLocation().pathname;
return this.removeQuery(path);
},
/**
Returns the current path root after popping off the last path segment,
making it useful for resolving other URL paths against.
The path root will always begin and end with a '/'.
@method _getPathRoot
@return {String} The URL's path root.
@protected
@since 3.5.0
**/
_getPathRoot: function () {
var slash = '/',
path = Y.getLocation().pathname,
segments;
if (path.charAt(path.length - 1) === slash) {
return path;
}
segments = path.split(slash);
segments.pop();
return segments.join(slash) + slash;
},
/**
Gets the current route query string.
@method _getQuery
@return {String} Current route query string.
@protected
**/
_getQuery: function () {
var location = Y.getLocation(),
hash, matches;
if (this._html5) {
return location.search.substring(1);
}
hash = HistoryHash.getHash();
matches = hash.match(this._regexUrlQuery);
return hash && matches ? matches[1] : location.search.substring(1);
},
/**
Creates a regular expression from the given route specification. If _path_
is already a regex, it will be returned unmodified.
@method _getRegex
@param {String|RegExp} path Route path specification.
@param {Array} keys Array reference to which route parameter names will be
added.
@return {RegExp} Route regex.
@protected
**/
_getRegex: function (path, keys) {
if (YLang.isRegExp(path)) {
return path;
}
// Special case for catchall paths.
if (path === '*') {
return (/.*/);
}
path = path.replace(this._regexPathParam, function (match, operator, key) {
            // Only `*` operators are supported for key-less matches, allowing
// in-path wildcards like: '/foo/*'.
if (!key) {
return operator === '*' ? '.*' : match;
}
keys.push(key);
return operator === '*' ? '(.*?)' : '([^/#?]+)';
});
return new RegExp('^' + path + '$');
},
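    // Worked example (illustrative): _getRegex('/photos/:tag/:page', keys)
    // builds a regex equivalent to ^/photos/([^/#?]+)/([^/#?]+)$ and pushes
    // 'tag' and 'page' onto `keys`; a splat such as '/file/*path' becomes
    // ^/file/(.*?)$ instead.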
/**
Gets a request object that can be passed to a route handler.
@method _getRequest
@param {String} src What initiated the URL change and need for the request.
@return {Object} Request object.
@protected
**/
_getRequest: function (src) {
return {
path : this._getPath(),
query : this._parseQuery(this._getQuery()),
url : this._getURL(),
router: this,
src : src
};
},
/**
Gets a response object that can be passed to a route handler.
@method _getResponse
@param {Object} req Request object.
@return {Object} Response Object.
@protected
**/
_getResponse: function (req) {
return {req: req};
},
/**
Getter for the `routes` attribute.
@method _getRoutes
@return {Object[]} Array of route objects.
@protected
**/
_getRoutes: function () {
return this._routes.concat();
},
/**
Gets the current full URL.
@method _getURL
@return {String} URL.
@protected
**/
_getURL: function () {
var url = Y.getLocation().toString();
if (!this._html5) {
url = this._upgradeURL(url);
}
return url;
},
/**
Returns `true` when the specified `url` is from the same origin as the
current URL; i.e., the protocol, host, and port of the URLs are the same.
All host or path relative URLs are of the same origin. A scheme-relative URL
is first prefixed with the current scheme before being evaluated.
@method _hasSameOrigin
@param {String} url URL to compare origin with the current URL.
@return {Boolean} Whether the URL has the same origin of the current URL.
@protected
**/
_hasSameOrigin: function (url) {
var origin = ((url && url.match(this._regexUrlOrigin)) || [])[0];
// Prepend current scheme to scheme-relative URLs.
if (origin && origin.indexOf('//') === 0) {
origin = Y.getLocation().protocol + origin;
}
return !origin || origin === this._getOrigin();
},
/**
Joins the `root` URL to the specified _url_, normalizing leading/trailing
`/` characters.
@example
router.set('root', '/foo');
router._joinURL('bar'); // => '/foo/bar'
router._joinURL('/bar'); // => '/foo/bar'
router.set('root', '/foo/');
router._joinURL('bar'); // => '/foo/bar'
router._joinURL('/bar'); // => '/foo/bar'
@method _joinURL
@param {String} url URL to append to the `root` URL.
@return {String} Joined URL.
@protected
**/
_joinURL: function (url) {
var root = this.get('root');
// Causes `url` to _always_ begin with a "/".
url = this.removeRoot(url);
if (url.charAt(0) === '/') {
url = url.substring(1);
}
return root && root.charAt(root.length - 1) === '/' ?
root + url :
root + '/' + url;
},
/**
Returns a normalized path, ridding it of any '..' segments and properly
handling leading and trailing slashes.
@method _normalizePath
@param {String} path URL path to normalize.
@return {String} Normalized path.
@protected
@since 3.5.0
**/
_normalizePath: function (path) {
var dots = '..',
slash = '/',
i, len, normalized, segments, segment, stack;
if (!path || path === slash) {
return slash;
}
segments = path.split(slash);
stack = [];
for (i = 0, len = segments.length; i < len; ++i) {
segment = segments[i];
if (segment === dots) {
stack.pop();
} else if (segment) {
stack.push(segment);
}
}
normalized = slash + stack.join(slash);
// Append trailing slash if necessary.
if (normalized !== slash && path.charAt(path.length - 1) === slash) {
normalized += slash;
}
return normalized;
},
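    // Worked example (illustrative): _normalizePath('/foo/../bar/') drops the
    // 'foo' segment when the '..' is seen and keeps the trailing slash,
    // returning '/bar/'.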
/**
Parses a URL query string into a key/value hash. If `Y.QueryString.parse` is
available, this method will be an alias to that.
@method _parseQuery
@param {String} query Query string to parse.
@return {Object} Hash of key/value pairs for query parameters.
@protected
**/
_parseQuery: QS && QS.parse ? QS.parse : function (query) {
var decode = this._decode,
params = query.split('&'),
i = 0,
len = params.length,
result = {},
param;
for (; i < len; ++i) {
param = params[i].split('=');
if (param[0]) {
result[decode(param[0])] = decode(param[1] || '');
}
}
return result;
},
/**
Returns `true` when the specified `path` is semantically within the
specified `root` path.
If the `root` does not end with a trailing slash ("/"), one will be added
before the `path` is evaluated against the root path.
@example
this._pathHasRoot('/app', '/app/foo'); // => true
this._pathHasRoot('/app/', '/app/foo'); // => true
this._pathHasRoot('/app/', '/app/'); // => true
this._pathHasRoot('/app', '/foo/bar'); // => false
this._pathHasRoot('/app/', '/foo/bar'); // => false
this._pathHasRoot('/app/', '/app'); // => false
this._pathHasRoot('/app', '/app'); // => false
@method _pathHasRoot
    @param {String} root Root path used to evaluate whether the specified
`path` is semantically within. A trailing slash ("/") will be added if
it does not already end with one.
@param {String} path Path to evaluate for containing the specified `root`.
@return {Boolean} Whether or not the `path` is semantically within the
`root` path.
@protected
@since 3.13.0
**/
_pathHasRoot: function (root, path) {
var rootPath = root.charAt(root.length - 1) === '/' ? root : root + '/';
return path.indexOf(rootPath) === 0;
},
/**
Queues up a `_save()` call to run after all previously-queued calls have
finished.
This is necessary because if we make multiple `_save()` calls before the
first call gets dispatched, then both calls will dispatch to the last call's
URL.
All arguments passed to `_queue()` will be passed on to `_save()` when the
queued function is executed.
@method _queue
@chainable
@see _dequeue
@protected
**/
_queue: function () {
var args = arguments,
self = this;
saveQueue.push(function () {
if (self._html5) {
if (Y.UA.ios && Y.UA.ios < 5) {
// iOS <5 has buggy HTML5 history support, and needs to be
// synchronous.
self._save.apply(self, args);
} else {
// Wrapped in a timeout to ensure that _save() calls are
// always processed asynchronously. This ensures consistency
// between HTML5- and hash-based history.
setTimeout(function () {
self._save.apply(self, args);
}, 1);
}
} else {
self._dispatching = true; // otherwise we'll dequeue too quickly
self._save.apply(self, args);
}
return self;
});
return !this._dispatching ? this._dequeue() : this;
},
/**
Returns the normalized result of resolving the `path` against the current
path. Falsy values for `path` will return just the current path.
@method _resolvePath
@param {String} path URL path to resolve.
@return {String} Resolved path.
@protected
@since 3.5.0
**/
_resolvePath: function (path) {
if (!path) {
return Y.getLocation().pathname;
}
if (path.charAt(0) !== '/') {
path = this._getPathRoot() + path;
}
return this._normalizePath(path);
},
/**
Resolves the specified URL against the current URL.
This method resolves URLs like a browser does and will always return an
absolute URL. When the specified URL is already absolute, it is assumed to
be fully resolved and is simply returned as is. Scheme-relative URLs are
    prefixed with the current protocol. Relative URLs are given the current
URL's origin and are resolved and normalized against the current path root.
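    Illustrative examples, assuming a current URL of
    "http://example.com/app/" (a hypothetical value):
    @example
        router._resolveURL('/foo/bar');            // => 'http://example.com/foo/bar'
        router._resolveURL('//cdn.example.com/x'); // => 'http://cdn.example.com/x'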
@method _resolveURL
@param {String} url URL to resolve.
@return {String} Resolved URL.
@protected
@since 3.5.0
**/
_resolveURL: function (url) {
var parts = url && url.match(this._regexURL),
origin, path, query, hash, resolved;
if (!parts) {
return Y.getLocation().toString();
}
origin = parts[1];
path = parts[2];
query = parts[3];
hash = parts[4];
// Absolute and scheme-relative URLs are assumed to be fully-resolved.
if (origin) {
// Prepend the current scheme for scheme-relative URLs.
if (origin.indexOf('//') === 0) {
origin = Y.getLocation().protocol + origin;
}
return origin + (path || '/') + (query || '') + (hash || '');
}
// Will default to the current origin and current path.
resolved = this._getOrigin() + this._resolvePath(path);
// A path or query for the specified URL trumps the current URL's.
if (path || query) {
return resolved + (query || '') + (hash || '');
}
query = this._getQuery();
return resolved + (query ? ('?' + query) : '') + (hash || '');
},
/**
Saves a history entry using either `pushState()` or the location hash.
This method enforces the same-origin security constraint; attempting to save
a `url` that is not from the same origin as the current URL will result in
an error.
@method _save
@param {String} [url] URL for the history entry.
@param {Boolean} [replace=false] If `true`, the current history entry will
be replaced instead of a new one being added.
@chainable
@protected
**/
_save: function (url, replace) {
var urlIsString = typeof url === 'string',
currentPath, root, hash;
// Perform same-origin check on the specified URL.
if (urlIsString && !this._hasSameOrigin(url)) {
Y.error('Security error: The new URL must be of the same origin as the current URL.');
return this;
}
// Joins the `url` with the `root`.
if (urlIsString) {
url = this._joinURL(url);
}
// Force _ready to true to ensure that the history change is handled
// even if _save is called before the `ready` event fires.
this._ready = true;
if (this._html5) {
this._history[replace ? 'replace' : 'add'](null, {url: url});
} else {
currentPath = Y.getLocation().pathname;
root = this.get('root');
hash = HistoryHash.getHash();
if (!urlIsString) {
url = hash;
}
// Determine if the `root` already exists in the current location's
// `pathname`, and if it does then we can exclude it from the
// hash-based path. No need to duplicate the info in the URL.
if (root === currentPath || root === this._getPathRoot()) {
url = this.removeRoot(url);
}
// The `hashchange` event only fires when the new hash is actually
// different. This makes sure we'll always dequeue and dispatch
// _all_ router instances, mimicking the HTML5 behavior.
if (url === hash) {
Y.Router.dispatch();
} else {
HistoryHash[replace ? 'replaceHash' : 'setHash'](url);
}
}
return this;
},
/**
Setter for the `params` attribute.
@method _setParams
@param {Object} params Map in the form: `name` -> RegExp | Function.
@return {Object} The map of params: `name` -> RegExp | Function.
@protected
@since 3.12.0
**/
_setParams: function (params) {
this._params = {};
YObject.each(params, function (regex, name) {
this.param(name, regex);
}, this);
return Y.merge(this._params);
},
/**
Setter for the `routes` attribute.
@method _setRoutes
@param {Object[]} routes Array of route objects.
@return {Object[]} Array of route objects.
@protected
**/
_setRoutes: function (routes) {
this._routes = [];
YArray.each(routes, function (route) {
this.route(route);
}, this);
return this._routes.concat();
},
/**
Upgrades a hash-based URL to a full-path URL, if necessary.
    The specified `url` will be upgraded if it's of the same origin as the
current URL and has a path-like hash. URLs that don't need upgrading will be
returned as-is.
@example
app._upgradeURL('http://example.com/#/foo/'); // => 'http://example.com/foo/';
@method _upgradeURL
@param {String} url The URL to upgrade from hash-based to full-path.
@return {String} The upgraded URL, or the specified URL untouched.
@protected
@since 3.5.0
**/
_upgradeURL: function (url) {
// We should not try to upgrade paths for external URLs.
if (!this._hasSameOrigin(url)) {
return url;
}
var hash = (url.match(/#(.*)$/) || [])[1] || '',
hashPrefix = Y.HistoryHash.hashPrefix,
hashPath;
// Strip any hash prefix, like hash-bangs.
if (hashPrefix && hash.indexOf(hashPrefix) === 0) {
hash = hash.replace(hashPrefix, '');
}
// If the hash looks like a URL path, assume it is, and upgrade it!
if (hash) {
hashPath = this._getHashPath(hash);
if (hashPath) {
return this._resolveURL(hashPath);
}
}
return url;
},
// -- Protected Event Handlers ---------------------------------------------
/**
Handles `history:change` and `hashchange` events.
@method _afterHistoryChange
@param {EventFacade} e
@protected
**/
_afterHistoryChange: function (e) {
var self = this,
src = e.src,
prevURL = self._url,
currentURL = self._getURL(),
req, res;
self._url = currentURL;
// Handles the awkwardness that is the `popstate` event. HTML5 browsers
// fire `popstate` right before they fire `hashchange`, and Chrome fires
// `popstate` on page load. If this router is not ready or the previous
// and current URLs only differ by their hash, then we want to ignore
// this `popstate` event.
if (src === 'popstate' &&
(!self._ready || prevURL.replace(/#.*$/, '') === currentURL.replace(/#.*$/, ''))) {
return;
}
req = self._getRequest(src);
res = self._getResponse(req);
self._dispatch(req, res);
},
// -- Default Event Handlers -----------------------------------------------
/**
Default handler for the `ready` event.
@method _defReadyFn
@param {EventFacade} e
@protected
**/
_defReadyFn: function (e) {
this._ready = true;
}
}, {
// -- Static Properties ----------------------------------------------------
NAME: 'router',
ATTRS: {
/**
Whether or not this browser is capable of using HTML5 history.
Setting this to `false` will force the use of hash-based history even on
HTML5 browsers, but please don't do this unless you understand the
consequences.
@attribute html5
@type Boolean
@initOnly
**/
html5: {
// Android versions lower than 3.0 are buggy and don't update
// window.location after a pushState() call, so we fall back to
// hash-based history for them.
//
// See http://code.google.com/p/android/issues/detail?id=17471
valueFn: function () { return Y.Router.html5; },
writeOnce: 'initOnly'
},
/**
Map of params handlers in the form: `name` -> RegExp | Function.
If a param handler regex or function returns a value of `false`, `null`,
`undefined`, or `NaN`, the current route will not match and be skipped.
All other return values will be used in place of the original param
value parsed from the URL.
This attribute is intended to be used to set params at init time, or to
completely reset all params after init. To add params after init without
resetting all existing params, use the `param()` method.
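        Illustrative example (handler shapes as described above):
        @example
            router.set('params', {
                id  : /^\d+$/,
                user: function (value) { return value.toLowerCase(); }
            });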
@attribute params
@type Object
@default `{}`
@see param
@since 3.12.0
**/
params: {
value : {},
getter: '_getParams',
setter: '_setParams'
},
/**
Absolute root path from which all routes should be evaluated.
For example, if your router is running on a page at
`http://example.com/myapp/` and you add a route with the path `/`, your
route will never execute, because the path will always be preceded by
`/myapp`. Setting `root` to `/myapp` would cause all routes to be
evaluated relative to that root URL, so the `/` route would then execute
when the user browses to `http://example.com/myapp/`.
@example
router.set('root', '/myapp');
router.route('/foo', function () { ... });
// Updates the URL to: "/myapp/foo"
router.save('/foo');
@attribute root
@type String
@default `''`
**/
root: {
value: ''
},
/**
Array of route objects.
Each item in the array must be an object with the following properties
in order to be processed by the router:
* `path`: String or regex representing the path to match. See the docs
for the `route()` method for more details.
* `callbacks`: Function or a string representing the name of a
function on this router instance that should be called when the
route is triggered. An array of functions and/or strings may also be
provided. See the docs for the `route()` method for more details.
If a route object contains a `regex` or `regexp` property, or if its
        `path` is a regular expression, then the route will be considered to be
fully-processed. Any fully-processed routes may contain the following
properties:
* `regex`: The regular expression representing the path to match, this
property may also be named `regexp` for greater compatibility.
* `keys`: Array of named path parameters used to populate `req.params`
objects when dispatching to route handlers.
Any additional data contained on these route objects will be retained.
This is useful to store extra metadata about a route; e.g., a `name` to
give routes logical names.
This attribute is intended to be used to set routes at init time, or to
completely reset all routes after init. To add routes after init without
resetting all existing routes, use the `route()` method.
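        Illustrative example (route objects shaped as described above):
        @example
            router.set('routes', [
                {path: '/', callbacks: 'handleRoot'},
                {path: '/:user', callbacks: ['loadUser', 'showUser'], name: 'user'}
            ]);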
@attribute routes
@type Object[]
@default `[]`
@see route
**/
routes: {
value : [],
getter: '_getRoutes',
setter: '_setRoutes'
}
},
// Used as the default value for the `html5` attribute, and for testing.
html5: Y.HistoryBase.html5 && (!Y.UA.android || Y.UA.android >= 3),
// To make this testable.
_instances: instances,
/**
Dispatches to the first route handler that matches the specified `path` for
all active router instances.
This provides a mechanism to cause all active router instances to dispatch
to their route handlers without needing to change the URL or fire the
`history:change` or `hashchange` event.
@method dispatch
@static
@since 3.6.0
**/
dispatch: function () {
var i, len, router, req, res;
for (i = 0, len = instances.length; i < len; i += 1) {
router = instances[i];
if (router) {
req = router._getRequest('dispatch');
res = router._getResponse(req);
router._dispatch(req, res);
}
}
}
});
/**
The `Controller` class was deprecated in YUI 3.5.0 and is now an alias for the
`Router` class. Use that class instead. This alias will be removed in a future
version of YUI.
@class Controller
@constructor
@extends Base
@deprecated Use `Router` instead.
@see Router
**/
Y.Controller = Y.Router;
}, '3.16.0', {"optional": ["querystring-parse"], "requires": ["array-extras", "base-build", "history"]});<|fim▁end|> | will be the hash from the current URL.
@return {String} Current hash path, or an empty string if the hash is empty. |
<|file_name|>condvar.rs<|end_file_name|><|fim▁begin|>use crate::sys::mutex::Mutex;
use crate::time::Duration;
pub struct Condvar {}
pub type MovableCondvar = Condvar;
impl Condvar {
pub const fn new() -> Condvar {
Condvar {}
}<|fim▁hole|>
#[inline]
pub unsafe fn init(&mut self) {}
#[inline]
pub unsafe fn notify_one(&self) {}
#[inline]
pub unsafe fn notify_all(&self) {}
pub unsafe fn wait(&self, _mutex: &Mutex) {
panic!("condvar wait not supported")
}
pub unsafe fn wait_timeout(&self, _mutex: &Mutex, _dur: Duration) -> bool {
panic!("condvar wait not supported");
}
#[inline]
pub unsafe fn destroy(&self) {}
}<|fim▁end|> | |
<|file_name|>0023_auto_20190405_0911.py<|end_file_name|><|fim▁begin|># Generated by Django 2.1.8 on 2019-04-05 07:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0022_auto_20190404_1605'),
]
operations = [
migrations.AlterField(
model_name='historicalpaper',<|fim▁hole|> migrations.AlterField(
model_name='paper',
name='reference_number',
field=models.CharField(blank=True, max_length=50, null=True),
),
]<|fim▁end|> | name='reference_number',
field=models.CharField(blank=True, max_length=50, null=True),
), |
<|file_name|>http_ece.rs<|end_file_name|><|fim▁begin|>use ece::encrypt;
use crate::error::WebPushError;
use crate::message::WebPushPayload;
use crate::vapid::VapidSignature;
/// Content encoding profiles.
pub enum ContentEncoding {
//Make sure this enum remains exhaustive as that allows for easier migrations to new versions.
Aes128Gcm,
}
/// Struct for handling payload encryption.
pub struct HttpEce<'a> {
peer_public_key: &'a [u8],
peer_secret: &'a [u8],
encoding: ContentEncoding,
vapid_signature: Option<VapidSignature>,
}
impl<'a> HttpEce<'a> {
/// Create a new encryptor.<|fim▁hole|> /// browser subscription info.
pub fn new(
encoding: ContentEncoding,
peer_public_key: &'a [u8],
peer_secret: &'a [u8],
vapid_signature: Option<VapidSignature>,
) -> HttpEce<'a> {
HttpEce {
peer_public_key,
peer_secret,
encoding,
vapid_signature,
}
}
/// Encrypts a payload. The maximum length for the payload is 3800
/// characters, which is the largest that works with Google's and Mozilla's
/// push servers.
pub fn encrypt(&self, content: &'a [u8]) -> Result<WebPushPayload, WebPushError> {
if content.len() > 3052 {
return Err(WebPushError::PayloadTooLarge);
}
//Add more encoding standards to this match as they are created.
match self.encoding {
ContentEncoding::Aes128Gcm => {
let result = encrypt(self.peer_public_key, self.peer_secret, content);
let mut headers = Vec::new();
//VAPID uses a special Authorisation header, which contains a ecdhsa key and a jwt.
if let Some(signature) = &self.vapid_signature {
headers.push((
"Authorization",
format!(
"vapid t={}, k={}",
signature.auth_t,
base64::encode_config(&signature.auth_k, base64::URL_SAFE_NO_PAD)
),
));
}
match result {
Ok(data) => Ok(WebPushPayload {
content: data,
crypto_headers: headers,
content_encoding: "aes128gcm",
}),
_ => Err(WebPushError::InvalidCryptoKeys),
}
}
}
}
}
#[cfg(test)]
mod tests {
use base64::{self, URL_SAFE};
use regex::Regex;
use crate::error::WebPushError;
use crate::http_ece::{ContentEncoding, HttpEce};
use crate::VapidSignature;
use crate::WebPushPayload;
#[test]
fn test_payload_too_big() {
let p256dh = base64::decode_config(
"BLMaF9ffKBiWQLCKvTHb6LO8Nb6dcUh6TItC455vu2kElga6PQvUmaFyCdykxY2nOSSL3yKgfbmFLRTUaGv4yV8",
URL_SAFE,
)
.unwrap();
let auth = base64::decode_config("xS03Fj5ErfTNH_l9WHE9Ig", URL_SAFE).unwrap();
let http_ece = HttpEce::new(ContentEncoding::Aes128Gcm, &p256dh, &auth, None);
//This content is one above limit.
let content = [0u8; 3801];
assert_eq!(Err(WebPushError::PayloadTooLarge), http_ece.encrypt(&content));
}
/// Tests that the content encryption is properly reversible while using aes128gcm.
#[test]
fn test_payload_encrypts_128() {
let (key, auth) = ece::generate_keypair_and_auth_secret().unwrap();
let p_key = key.raw_components().unwrap();
let p_key = p_key.public_key();
let http_ece = HttpEce::new(ContentEncoding::Aes128Gcm, p_key, &auth, None);
let plaintext = "Hello world!";
let ciphertext = http_ece.encrypt(plaintext.as_bytes()).unwrap();
assert_ne!(plaintext.as_bytes(), ciphertext.content);
assert_eq!(
String::from_utf8(ece::decrypt(&key.raw_components().unwrap(), &auth, &ciphertext.content).unwrap())
.unwrap(),
plaintext
)
}
fn setup_payload(vapid_signature: Option<VapidSignature>, encoding: ContentEncoding) -> WebPushPayload {
let p256dh = base64::decode_config(
"BLMbF9ffKBiWQLCKvTHb6LO8Nb6dcUh6TItC455vu2kElga6PQvUmaFyCdykxY2nOSSL3yKgfbmFLRTUaGv4yV8",
URL_SAFE,
)
.unwrap();
let auth = base64::decode_config("xS03Fi5ErfTNH_l9WHE9Ig", URL_SAFE).unwrap();
let http_ece = HttpEce::new(encoding, &p256dh, &auth, vapid_signature);
let content = "Hello, world!".as_bytes();
http_ece.encrypt(content).unwrap()
}
#[test]
fn test_aes128gcm_headers_no_vapid() {
let wp_payload = setup_payload(None, ContentEncoding::Aes128Gcm);
assert_eq!(wp_payload.crypto_headers.len(), 0);
}
#[test]
fn test_aes128gcm_headers_vapid() {
let auth_re = Regex::new(r"vapid t=(?P<sig_t>[^,]*), k=(?P<sig_k>[^,]*)").unwrap();
let vapid_signature = VapidSignature {
auth_t: String::from("foo"),
auth_k: String::from("bar").into_bytes(),
};
let wp_payload = setup_payload(Some(vapid_signature), ContentEncoding::Aes128Gcm);
assert_eq!(wp_payload.crypto_headers.len(), 1);
let auth = wp_payload.crypto_headers[0].clone();
assert_eq!(auth.0, "Authorization");
assert!(auth_re.captures(&auth.1).is_some());
}
}<|fim▁end|> | ///
/// `peer_public_key` is the `p256dh` and `peer_secret` the `auth` from |
<|file_name|>inventory.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import absolute_import
from collections import OrderedDict
from linchpin.InventoryFilters.InventoryFilter import InventoryFilter
class Inventory(InventoryFilter):
DEFAULT_HOSTNAMES = ['public_ip']
def get_host_data(self, res, cfgs):
"""
Returns a dict of hostnames or IP addresses for use in an Ansible
inventory file, based on available data. Only a single hostname or IP
address will be returned per instance, so as to avoid duplicate runs of
Ansible on the same host via the generated inventory file.
Each hostname contains mappings of any variable that was defined in the
cfgs section of the PinFile (e.g. __IP__) to the value in the field that
corresponds with that variable in the cfgs.
By default, the hostname will be the public_ip field returned by gcloud
        :param res:
linchpin GCloud resource data<|fim▁hole|> map of config options from PinFile
"""
if res['resource_group'] != 'gcloud':
return OrderedDict()
if res['role'] == 'gcloud_gce':
return self.get_gcloud_gce_host_data(res, cfgs)
else:
return OrderedDict()
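    # Illustrative shape of the mapping returned by get_host_data()
    # (hostname and variable values here are hypothetical):
    #   {'203.0.113.10': {'__IP__': '203.0.113.10'}}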
def get_gcloud_gce_host_data(self, res, cfgs):
host_data = OrderedDict()
var_data = cfgs.get('gcloud', {})
if var_data is None:
var_data = {}
for instance in res['instance_data']:
host = self.get_hostname(instance, var_data,
self.DEFAULT_HOSTNAMES)
hostname_var = host[0]
hostname = host[1]
host_data[hostname] = {}
if '__IP__' not in list(var_data.keys()):
var_data['__IP__'] = hostname_var
host_data[hostname] = {}
self.set_config_values(host_data[hostname], instance, var_data)
return host_data<|fim▁end|> |
:param cfgs: |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from django.contrib.gis.geos import (LineString, Polygon, MultiPolygon,
MultiLineString, MultiPoint, Point)
from django.core.exceptions import ValidationError
from django.conf import settings
from django.test.utils import override_settings
from unittest import skipIf
from bs4 import BeautifulSoup
from geotrek.common.tests import TranslationResetMixin
from geotrek.core.tests.factories import PathFactory
from geotrek.zoning.tests.factories import DistrictFactory, CityFactory
from geotrek.trekking.tests.factories import (POIFactory, TrekFactory,
TrekWithPOIsFactory, ServiceFactory,
RatingFactory, RatingScaleFactory)
from geotrek.trekking.models import Trek, OrderedTrekChild
class TrekTest(TranslationResetMixin, TestCase):
def test_is_publishable(self):
t = TrekFactory.create()
t.geom = LineString((0, 0), (1, 1))
self.assertTrue(t.has_geom_valid())
t.description_teaser = ''
self.assertFalse(t.is_complete())
self.assertFalse(t.is_publishable())
t.description_teaser = 'ba'
t.departure = 'zin'
t.arrival = 'ga'
self.assertTrue(t.is_complete())
self.assertTrue(t.is_publishable())
t.geom = MultiLineString([LineString((0, 0), (1, 1)), LineString((2, 2), (3, 3))])
self.assertFalse(t.has_geom_valid())
self.assertFalse(t.is_publishable())
def test_any_published_property(self):
t = TrekFactory.create(published=False)
t.published_fr = False
t.published_it = False
t.save()
self.assertFalse(t.any_published)
t.published_it = True
t.save()
self.assertTrue(t.any_published)
@override_settings(PUBLISHED_BY_LANG=False)
def test_any_published_without_published_by_lang(self):
t = TrekFactory.create(published=False)
t.published_fr = True
t.save()
self.assertFalse(t.any_published)
def test_published_status(self):
t = TrekFactory.create(published=False)
t.published_fr = False
t.published_it = True
t.save()
self.assertEqual(t.published_status, [
{'lang': 'en', 'language': 'English', 'status': False},
{'lang': 'es', 'language': 'Spanish', 'status': False},
{'lang': 'fr', 'language': 'French', 'status': False},
{'lang': 'it', 'language': 'Italian', 'status': True}])
@override_settings(PUBLISHED_BY_LANG=False)
def test_published_status_without_published_by_lang(self):
t = TrekFactory.create(published=True)
t.published_fr = False
t.published_it = False
t.save()
self.assertEqual(t.published_status, [
{'lang': 'en', 'language': 'English', 'status': True},
{'lang': 'es', 'language': 'Spanish', 'status': True},
{'lang': 'fr', 'language': 'French', 'status': True},
{'lang': 'it', 'language': 'Italian', 'status': True}])
@override_settings(PUBLISHED_BY_LANG=False)
def test_published_langs_without_published_by_lang_not_published(self):
t = TrekFactory.create(published=False)
t.published_fr = True
t.published_it = True
t.save()
self.assertEqual(t.published_langs, [])
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_kml_coordinates_should_be_3d(self):
trek = TrekWithPOIsFactory.create()
kml = trek.kml()
parsed = BeautifulSoup(kml, 'lxml')
for placemark in parsed.findAll('placemark'):
coordinates = placemark.find('coordinates')
tuples = [s.split(',') for s in coordinates.string.split(' ')]
self.assertTrue(all([len(i) == 3 for i in tuples]))
def test_pois_types(self):
trek = TrekWithPOIsFactory.create()
type0 = trek.pois[0].type
type1 = trek.pois[1].type
self.assertEqual(2, len(trek.poi_types))
self.assertIn(type0, trek.poi_types)
self.assertIn(type1, trek.poi_types)
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_delete_cascade(self):
p1 = PathFactory.create()
p2 = PathFactory.create()
t = TrekFactory.create(paths=[p1, p2])
# Everything should be all right before delete
self.assertTrue(t.published)
self.assertFalse(t.deleted)
self.assertEqual(t.aggregations.count(), 2)
# When a path is deleted
p1.delete()
t = Trek.objects.get(pk=t.pk)
self.assertFalse(t.published)
self.assertFalse(t.deleted)
self.assertEqual(t.aggregations.count(), 1)
# Reset published status
t.published = True
t.save()
# When all paths are deleted
p2.delete()
t = Trek.objects.get(pk=t.pk)
self.assertFalse(t.published)
self.assertTrue(t.deleted)
self.assertEqual(t.aggregations.count(), 0)
def test_treks_are_sorted_by_name(self):
TrekFactory.create(name='Cb')
TrekFactory.create(name='Ca')
TrekFactory.create(name='A')
TrekFactory.create(name='B')
self.assertQuerysetEqual(Trek.objects.all(),
['<Trek: A>', '<Trek: B>', '<Trek: Ca>', '<Trek: Cb>'],
ordered=False)
def test_trek_itself_as_parent(self):
"""
        Test that a trek cannot be its own parent
"""
trek1 = TrekFactory.create(name='trek1')
OrderedTrekChild.objects.create(parent=trek1, child=trek1)
self.assertRaisesMessage(ValidationError,
"Cannot use itself as child trek.",
trek1.full_clean)
class TrekPublicationDateTest(TranslationResetMixin, TestCase):
def setUp(self):
self.trek = TrekFactory.create(published=False)
def test_default_value_is_null(self):
self.assertIsNone(self.trek.publication_date)
def test_takes_current_date_when_published_becomes_true(self):
self.trek.published = True
self.trek.save()
self.assertIsNotNone(self.trek.publication_date)
def test_becomes_null_when_unpublished(self):
self.test_takes_current_date_when_published_becomes_true()
self.trek.published = False
self.trek.save()
self.assertIsNone(self.trek.publication_date)
def test_date_is_not_updated_when_saved_again(self):
import datetime
self.test_takes_current_date_when_published_becomes_true()
old_date = datetime.date(2003, 8, 6)
self.trek.publication_date = old_date
self.trek.save()
self.assertEqual(self.trek.publication_date, old_date)
class RelatedObjectsTest(TranslationResetMixin, TestCase):
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_helpers(self):
p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
p2 = PathFactory.create(geom=LineString((4, 4), (8, 8)))
trek = TrekFactory.create(paths=[(p1, 0.5, 1), (p2, 0, 1)])
poi = POIFactory.create(paths=[(p1, 0.6, 0.6)])
poi2 = POIFactory.create(paths=[(p1, 0.6, 0.6)])
service = ServiceFactory.create(paths=[(p1, 0.7, 0.7)])
service.type.practices.add(trek.practice)
trek.pois_excluded.add(poi2.pk)
        # /!\ Districts are automatically linked to paths at DB level
d1 = DistrictFactory.create(geom=MultiPolygon(
Polygon(((-2, -2), (3, -2), (3, 3), (-2, 3), (-2, -2)))))
# Ensure related objects are accessible
self.assertCountEqual(trek.pois_excluded.all(), [poi2])
self.assertCountEqual(trek.all_pois, [poi, poi2])
self.assertCountEqual(trek.pois, [poi])
self.assertCountEqual(trek.services, [service])
self.assertCountEqual(poi.treks, [trek])
self.assertCountEqual(service.treks, [trek])
self.assertCountEqual(trek.districts, [d1])
        # Ensure there are no duplicates
self.assertCountEqual(trek.pois_excluded.all(), [poi2])
self.assertCountEqual(trek.all_pois, [poi, poi2])
self.assertCountEqual(trek.pois, [poi])
self.assertCountEqual(trek.services, [service])
self.assertCountEqual(poi.treks, [trek])
self.assertCountEqual(service.treks, [trek])
d2 = DistrictFactory.create(geom=MultiPolygon(
Polygon(((3, 3), (9, 3), (9, 9), (3, 9), (3, 3)))))
self.assertCountEqual(trek.districts, [d1, d2])
@skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only')
def test_helpers_nds(self):
trek = TrekFactory.create(geom=LineString((2, 2), (8, 8)))
poi = POIFactory.create(geom=Point(2.4, 2.4))
poi2 = POIFactory.create(geom=Point(2.4, 2.4))
service = ServiceFactory.create(geom=Point(2.8, 2.8))
service.type.practices.add(trek.practice)
trek.pois_excluded.add(poi2.pk)
        # /!\ Districts are automatically linked to paths at DB level
d1 = DistrictFactory.create(geom=MultiPolygon(
Polygon(((-2, -2), (3, -2), (3, 3), (-2, 3), (-2, -2)))))
# Ensure related objects are accessible
self.assertCountEqual(trek.pois_excluded.all(), [poi2])
self.assertCountEqual(trek.all_pois, [poi, poi2])
self.assertCountEqual(trek.pois, [poi])
self.assertCountEqual(trek.services, [service])
self.assertCountEqual(poi.treks, [trek])
self.assertCountEqual(service.treks, [trek])
self.assertCountEqual(trek.districts, [d1])
@skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only')
def test_deleted_pois_nds(self):
trek = TrekFactory.create(geom=LineString((0, 0), (4, 4)))
poi = POIFactory.create(geom=Point(2.4, 2.4))
self.assertCountEqual(trek.pois, [poi])
poi.delete()
self.assertCountEqual(trek.pois, [])
@skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only')
def test_deleted_services_nds(self):
trek = TrekFactory.create(geom=LineString((0, 0), (4, 4)))
service = ServiceFactory.create(geom=Point(2.4, 2.4))
service.type.practices.add(trek.practice)
self.assertCountEqual(trek.services, [service])
service.delete()
self.assertCountEqual(trek.services, [])
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_deleted_pois(self):
p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
trek = TrekFactory.create(paths=[p1])
poi = POIFactory.create(paths=[(p1, 0.6, 0.6)])
self.assertCountEqual(trek.pois, [poi])
poi.delete()
self.assertCountEqual(trek.pois, [])
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_deleted_services(self):
p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
trek = TrekFactory.create(paths=[p1])
service = ServiceFactory.create(paths=[(p1, 0.6, 0.6)])
service.type.practices.add(trek.practice)
self.assertCountEqual(trek.services, [service])
service.delete()
self.assertCountEqual(trek.services, [])
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_pois_should_be_ordered_by_progression(self):
p1 = PathFactory.create(geom=LineString((0, 0), (4, 4)))
p2 = PathFactory.create(geom=LineString((4, 4), (8, 8)))
self.trek = TrekFactory.create(paths=[p1, p2])
self.trek_reverse = TrekFactory.create(paths=[(p2, 0.8, 0), (p1, 1, 0.2)])
self.poi1 = POIFactory.create(paths=[(p1, 0.8, 0.8)])
self.poi2 = POIFactory.create(paths=[(p1, 0.3, 0.3)])
self.poi3 = POIFactory.create(paths=[(p2, 0.5, 0.5)])
pois = self.trek.pois
self.assertEqual([self.poi2, self.poi1, self.poi3], list(pois))
pois = self.trek_reverse.pois
self.assertEqual([self.poi3, self.poi1, self.poi2], list(pois))
@skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only')
def test_pois_is_not_ordered_by_progression(self):<|fim▁hole|>
self.poi1 = POIFactory.create(geom=Point(3.2, 3.2))
self.poi2 = POIFactory.create(geom=Point(1.2, 1.2))
self.poi3 = POIFactory.create(geom=Point(4, 4))
pois = self.trek.pois
self.assertCountEqual([self.poi1, self.poi2, self.poi3], pois)
pois = self.trek_reverse.pois
self.assertCountEqual([self.poi1, self.poi2, self.poi3], pois)
@skipIf(not settings.TREKKING_TOPOLOGY_ENABLED, 'Test with dynamic segmentation only')
def test_city_departure(self):
p1 = PathFactory.create(geom=LineString((0, 0), (5, 5)))
trek = TrekFactory.create(paths=[p1])
self.assertEqual(trek.city_departure, '')
city1 = CityFactory.create(geom=MultiPolygon(Polygon(((-1, -1), (3, -1), (3, 3),
(-1, 3), (-1, -1)))))
city2 = CityFactory.create(geom=MultiPolygon(Polygon(((3, 3), (9, 3), (9, 9),
(3, 9), (3, 3)))))
self.assertEqual([city for city in trek.cities], [city1, city2])
self.assertEqual(trek.city_departure, str(city1))
@skipIf(settings.TREKKING_TOPOLOGY_ENABLED, 'Test without dynamic segmentation only')
def test_city_departure_nds(self):
trek = TrekFactory.create(geom=LineString((0, 0), (5, 5)))
self.assertEqual(trek.city_departure, '')
city1 = CityFactory.create(geom=MultiPolygon(Polygon(((-1, -1), (3, -1), (3, 3),
(-1, 3), (-1, -1)))))
city2 = CityFactory.create(geom=MultiPolygon(Polygon(((3, 3), (9, 3), (9, 9),
(3, 9), (3, 3)))))
self.assertEqual([city for city in trek.cities], [city1, city2])
self.assertEqual(trek.city_departure, str(city1))
class TrekUpdateGeomTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.trek = TrekFactory.create(published=True, geom=LineString(((700000, 6600000), (700100, 6600100)), srid=2154))
def test_save_with_same_geom(self):
geom = LineString(((700000, 6600000), (700100, 6600100)), srid=2154)
self.trek.geom = geom
self.trek.save()
retrieve_trek = Trek.objects.get(pk=self.trek.pk)
self.assertTrue(retrieve_trek.geom.equals_exact(geom, tolerance=0.00001))
def test_save_with_another_geom(self):
geom = LineString(((-7, -7), (5, -7), (5, 5), (-7, 5), (-7, -7)), srid=2154)
self.trek.geom = geom
self.trek.save()
retrieve_trek = Trek.objects.get(pk=self.trek.pk)
if settings.TREKKING_TOPOLOGY_ENABLED:
self.assertFalse(retrieve_trek.geom.equals_exact(geom, tolerance=0.00001))
else:
self.assertTrue(retrieve_trek.geom.equals_exact(geom, tolerance=0.00001))
def test_save_with_provided_one_field_exclusion(self):
self.trek.save(update_fields=['geom'])
self.assertTrue(self.trek.pk)
def test_save_with_multiple_fields_exclusion(self):
new_trek = TrekFactory.create()
new_trek.description_en = 'Description Test update'
new_trek.ambiance = 'Very special ambiance, for test purposes.'
new_trek.save(update_fields=['description_en'])
db_trek = Trek.objects.get(pk=new_trek.pk)
self.assertTrue(db_trek.pk)
self.assertEqual(db_trek.description_en, 'Description Test update')
self.assertNotEqual(db_trek.ambiance, 'Very special ambiance, for test purposes.')
new_trek.save(update_fields=['ambiance_en'])
db_trek = Trek.objects.get(pk=new_trek.pk)
self.assertEqual(db_trek.ambiance_en, 'Very special ambiance, for test purposes.')
class TrekItinerancyTest(TestCase):
def test_next_previous(self):
trekA = TrekFactory(name="A")
trekB = TrekFactory(name="B")
trekC = TrekFactory(name="C")
trekD = TrekFactory(name="D")
OrderedTrekChild(parent=trekC, child=trekA, order=42).save()
OrderedTrekChild(parent=trekC, child=trekB, order=15).save()
OrderedTrekChild(parent=trekD, child=trekA, order=1).save()
self.assertEqual(list(trekA.children_id), [])
self.assertEqual(list(trekB.children_id), [])
self.assertEqual(list(trekC.children_id), [trekB.id, trekA.id])
self.assertEqual(list(trekD.children_id), [trekA.id])
self.assertEqual(trekA.next_id, {trekC.id: None, trekD.id: None})
self.assertEqual(trekB.next_id, {trekC.id: trekA.id})
self.assertEqual(trekC.next_id, {})
self.assertEqual(trekD.next_id, {})
self.assertEqual(trekA.previous_id, {trekC.id: trekB.id, trekD.id: None})
self.assertEqual(trekB.previous_id, {trekC.id: None})
self.assertEqual(trekC.previous_id, {})
self.assertEqual(trekD.previous_id, {})
def test_delete_child(self):
trekA = TrekFactory(name="A")
trekB = TrekFactory(name="B")
trekC = TrekFactory(name="C")
OrderedTrekChild(parent=trekA, child=trekB, order=1).save()
OrderedTrekChild(parent=trekA, child=trekC, order=2).save()
self.assertTrue(OrderedTrekChild.objects.filter(child=trekB).exists())
self.assertQuerysetEqual(trekA.children, ['<Trek: B>', '<Trek: C>'])
self.assertQuerysetEqual(trekB.parents, ['<Trek: A>'])
self.assertQuerysetEqual(trekC.parents, ['<Trek: A>'])
self.assertEqual(list(trekA.children_id), [trekB.id, trekC.id])
self.assertEqual(trekB.parents_id, [trekA.id])
self.assertEqual(trekC.parents_id, [trekA.id])
trekB.delete()
self.assertEqual(trekC.previous_id_for(trekA), None)
self.assertEqual(trekC.next_id_for(trekA), None)
self.assertEqual(trekC.next_id, {trekA.id: None})
self.assertEqual(trekC.previous_id, {trekA.id: None})
self.assertFalse(OrderedTrekChild.objects.filter(child=trekB).exists())
self.assertQuerysetEqual(trekA.children, ['<Trek: C>'])
self.assertQuerysetEqual(trekC.parents, ['<Trek: A>'])
self.assertEqual(list(trekA.children_id), [trekC.id])
self.assertEqual(trekC.parents_id, [trekA.id])
def test_delete_parent(self):
trekA = TrekFactory(name="A")
trekB = TrekFactory(name="B")
trekC = TrekFactory(name="C")
OrderedTrekChild(parent=trekB, child=trekA, order=1).save()
OrderedTrekChild(parent=trekC, child=trekA, order=2).save()
self.assertTrue(OrderedTrekChild.objects.filter(parent=trekB).exists())
self.assertQuerysetEqual(trekA.parents, ['<Trek: B>', '<Trek: C>'], ordered=False)
self.assertQuerysetEqual(trekB.children, ['<Trek: A>'])
self.assertQuerysetEqual(trekC.children, ['<Trek: A>'])
self.assertEqual(trekA.parents_id, [trekB.id, trekC.id])
self.assertEqual(list(trekB.children_id), [trekA.id])
self.assertEqual(list(trekC.children_id), [trekA.id])
trekB.delete()
self.assertEqual(trekA.previous_id_for(trekC), None)
self.assertEqual(trekA.next_id_for(trekC), None)
self.assertEqual(trekA.next_id, {trekC.id: None})
self.assertEqual(trekA.previous_id, {trekC.id: None})
self.assertFalse(OrderedTrekChild.objects.filter(parent=trekB).exists())
self.assertQuerysetEqual(trekA.parents, ['<Trek: C>'])
self.assertQuerysetEqual(trekC.children, ['<Trek: A>'])
self.assertEqual(trekA.parents_id, [trekC.id])
self.assertEqual(list(trekC.children_id), [trekA.id])
class MapImageExtentTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.trek = TrekFactory.create(
points_reference=MultiPoint([Point(0, 0), Point(1, 1)], srid=settings.SRID),
parking_location=Point(0, 0, srid=settings.SRID),
)
POIFactory.create(paths=[(cls.trek.paths.first(), 0.25, 0.25)], published=True)
def test_get_map_image_extent(self):
lng_min, lat_min, lng_max, lat_max = self.trek.get_map_image_extent()
self.assertAlmostEqual(lng_min, -1.3630812101179004)
self.assertAlmostEqual(lat_min, -5.983856309208769)
self.assertAlmostEqual(lng_max, 3.001303976720215)
self.assertAlmostEqual(lat_max, 46.50090044234927)
class RatingScaleTest(TestCase):
def test_ratingscale_str(self):
scale = RatingScaleFactory.create(name='Bar', practice__name='Foo')
self.assertEqual(str(scale), 'Bar (Foo)')
class RatingTest(TestCase):
def test_rating_str(self):
scale = RatingFactory.create(name='Bar')
self.assertEqual(str(scale), 'RatingScale : Bar')<|fim▁end|> | self.trek = TrekFactory.create(geom=LineString((0, 0), (8, 8)))
self.trek_reverse = TrekFactory.create(geom=LineString((6.4, 6.4), (0.8, 0.8))) |
<|file_name|>queryutils.py<|end_file_name|><|fim▁begin|>from msaf.models import dbsession, Sample, Marker, Batch
from msaf.lib.analytics import SampleSet
from itertools import cycle
import yaml
def load_yaml(yaml_text):
    d = yaml.safe_load( yaml_text )
instances = {}
for k in d:
if k == 'selector':
instances['selector'] = Selector.from_dict( d[k] )
elif k == 'filter':
instances['filter'] = Filter.from_dict( d[k] )
elif k == 'differentiation':
instances['differentiation'] = Differentiation.from_dict( d[k] )
else:
            raise RuntimeError('unknown configuration section: %s' % k)
return instances
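# Illustrative input for load_yaml() (keys taken from the from_dict()
# implementations below; the values are hypothetical):
#
#   selector:
#     samples:
#       group-1:
#         - batch: batch-1
#           codes: [S001, S002]
#     markers: [MS1, MS2]
#   filter:
#     abs_threshold: 100
#     rel_threshold: 0.33
#     rel_cutoff: 0.25
#     sample_qual_threshold: 0.5
#     marker_qual_threshold: 0.1
#     sample_option: all
#   differentiation:
#     spatial: 0
#     temporal: 0
#     detection: 0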
def save_yaml( instances ):
# we don't really need to save to YAML yet
pass
colours = cycle( [ 'red', 'green', 'blue', 'orange', 'purple', 'black', 'magenta',
'wheat', 'cyan', 'brown', 'slateblue', 'lightgreen' ] )
class Selector(object):
def __init__(self, samples = [], markers = []):
self.samples = []
self.markers = []
@staticmethod
def from_dict(d):
selector = Selector()
selector.samples = d['samples']
selector.markers = d['markers']
return selector
def to_dict(self):
return { 'samples': self.samples, 'markers': self.markers }
@staticmethod
def load(yaml_text):
        d = yaml.safe_load( yaml_text )
selector = Selector.from_dict( d )
return selector
def dump(self):
d = self.to_dict()
return yaml.dump( d )
def get_sample_ids(self, db):
""" return sample ids; db is SQLa dbsession handler """
pass
def get_marker_ids(self):
""" return marker ids; db is SQLa dbsession handler """
        # self.markers holds marker names, not ids
markers = [ Marker.search(name) for name in self.markers ]
return [ marker.id for marker in markers ]
def get_sample_sets(self, db=None):
if not db:
db = dbsession
sample_set = []
for label in self.samples:
if label == '__ALL__':
# single query
pass
sample_ids = []
sample_selector = self.samples[label]
for spec in sample_selector:
if 'query' in spec:
if '$' in spec['query']:
                    raise RuntimeError('query must not be an advanced one')
if 'batch' in spec:
query = spec['batch'] + '[batch] & (' + spec['query'] + ')'
elif 'codes' in spec:
batch = Batch.search(spec['batch'])
q = dbsession.query( Sample.id ).join( Batch ).filter( Batch.id == batch.id).filter( Sample.code.in_( spec['codes'] ) )
sample_ids += list( q )
if label == '__ALL__':
label = '-'
sample_set.append( SampleSet( location = '', year = 0,
label = label,
colour = next(colours),
sample_ids = sample_ids ) )
return sample_set
class Filter(object):
def __init__(self):
self.abs_threshold = 0
self.rel_threshold = 0
self.rel_cutoff = 0
self.sample_qual_threshold = 0
self.marker_qual_threshold = 0
self.sample_options = None
@staticmethod
def from_dict(d):
filter_params = Filter()
filter_params.abs_threshold = int( d['abs_threshold'] )
filter_params.rel_threshold = float( d['rel_threshold'] )
filter_params.rel_cutoff = float( d['rel_cutoff'] )
filter_params.sample_qual_threshold = float( d['sample_qual_threshold'] )
filter_params.marker_qual_threshold = float( d['marker_qual_threshold'] )
filter_params.sample_option = d['sample_option']
return filter_params
def to_dict(self):
pass
@staticmethod
def load(yaml_text):
pass
def dump(self):
pass
<|fim▁hole|>class Differentiation(object):
def __init__(self):
self.spatial = 0
self.temporal = 0
self.differentiation = 0
@staticmethod
def from_dict(d):
differentiation = Differentiation()
differentiation.spatial = d['spatial']
differentiation.temporal = d['temporal']
differentiation.detection = d['detection']
return differentiation
def to_dict(self):
pass
@staticmethod
def load(yaml_text):
pass
def dump(self):
pass
def create_group( selector ):
pass<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub mod network {
// Modules can be nested inside other modules
pub mod client {
pub fn connect() {}
}
pub mod server {
pub fn connect() {}
}
pub fn connect() {}
}
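// Illustrative calls from a downstream crate (crate name assumed):
//   the_crate::network::connect();
//   the_crate::network::client::connect();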
// Multiple modules can be defined in the same lib.rs
pub mod client {<|fim▁hole|>// module code can also be in a <name> directory with a mod.rs file.
pub mod networksep;
// Modules can be defined here, but the code lies in another file.
pub mod netclient;
#[cfg(test)]
mod tests;<|fim▁end|> | pub fn connect() {}
}
|
<|file_name|>RegexVariables.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the<|fim▁hole|> *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.annotator.regex;
import java.util.regex.Pattern;
/**
* RegexVariables interface.
*/
public interface RegexVariables {
public static final String VARIABLE_START = "\\v";
public static final String VARIABLE_REGEX_BEGIN = "\\\\v\\{";
public static final String VARIABLE_REGEX_END = "\\}";
public static final Pattern VARIABLE_REGEX_PATTERN = Pattern
.compile(VARIABLE_REGEX_BEGIN + "(\\w+)" + VARIABLE_REGEX_END);
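  // Illustrative match: VARIABLE_REGEX_PATTERN recognizes variable references
  // written as "\v{name}" inside a regex string, capturing "name" in group 1.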
/**
* Adds a variable to the Variables object.
*
* @param varName
* variable name
*
* @param varValue
* variable value
*/
public void addVariable(String varName, String varValue);
/**
* returns the value of the specified variable or <code>null</code> if the
* variable does not exist
*
* @param varName
* variable name
*
   * @return returns the variable value or <code>null</code> if the variable
* does not exist
*
*/
public String getVariableValue(String varName);
}<|fim▁end|> | * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at |
<|file_name|>DesktopModule.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package grails.plugin.console.charts.client.gin;
import com.gwtplatform.mvp.client.gin.AbstractPresenterModule;
import grails.plugin.console.charts.client.application.ApplicationDesktopModule;
/**
* @author <a href='mailto:[email protected]'>Alexey Zhokhov</a>
*/
public class DesktopModule extends AbstractPresenterModule {
@Override
protected void configure() {
install(new ApplicationDesktopModule());
}
}<|fim▁end|> | * |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
setup(name='jgcspendfrom',
version='1.0',
description='Command-line utility for jgcoin "coin control"',
author='Gavin Andresen',
author_email='[email protected]',<|fim▁hole|> requires=['jsonrpc'],
scripts=['spendfrom.py'],
)<|fim▁end|> | |
<|file_name|>indent.js<|end_file_name|><|fim▁begin|>// test: indent_only
const
foo
=
1
+
2
;
const foo =
[1, 2];
<|fim▁hole|> {a, b};
const foo = {
a, b};
someMethod(foo, [
0, 1, 2,], bar);
someMethod(
foo,
[0, 1, 2,],
bar);
someMethod(foo, [
0, 1,
2,
], bar);
someMethod(
foo,
[
1, 2],);
someMethod(
foo,
[1, 2],
);
someMethod(foo, {
a: 1, b: 2,}, bar);
someMethod(
foo,
{a: 1, b: 2,},
bar);
someMethod(foo, {
a: 1, b: 2
}, bar);
someMethod(
foo,
{
a: 1, b: 2},);
someMethod(
foo,
{a: 1, b: 2},
);
someMethod(a =>
a*2
);
someMethod(a => {
a*2}
);
foo()
.bar(a => a*2);
foo().bar(a =>
a*2);
foo =
function() {
};
foo =
function() {
};
switch (foo) {
case 1: return b;}
switch (foo) {
case 1:
return b;}
class Foo {
}
class Foo {
bar(
) {}
}
class Foo {
bar() {
}
}
if (x) {
statement;
more;
}<|fim▁end|> | const foo = [
1, 2];
const foo = |
<|file_name|>files.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# GMate - Plugin Based Programmer's Text Editor
# Copyright © 2008 Alexandre da Silva / Carlos Antonio da Silva
#
# This file is part of Gmate.
#
# See LICENTE.TXT for licence information
import os
import gnomevfs
from datetime import datetime
def get_file_info(uri):
"""Return the File information if uri exists"""
if uri is not None and gnomevfs.exists(uri):
return gnomevfs.get_file_info(uri)
return False
def is_uri_dir(uri):
"""Checks if given uri is a dir"""
file_info = get_file_info(uri)
if file_info:
return is_dir(file_info)
return False
def is_dir(file_info):
"""Checks to see if the file is a directory."""
if file_info is not None:
return file_info.type == gnomevfs.FILE_TYPE_DIRECTORY
return False
def is_file(file_info):
"""Checks to see if the file is a directory."""<|fim▁hole|>
def is_hidden(file_info):
"""Checks to see if the file is hidden."""
if file_info is not None:
return file_info.name.startswith(u'.') or file_info.name.endswith(u'~')
return False
def is_hidden_dir(file_info):
"""Checks to see if the file is a hidden directory."""
return is_dir(file_info) and is_hidden(file_info)
def is_hidden_file(file_info):
"""Checks to see if the file is a hidden file."""
return not is_dir(file_info) and is_hidden(file_info)
def is_visible_dir(file_info):
"""Checks to see if the file is a visible directory."""
return is_dir(file_info) and not is_hidden(file_info)
def is_visible_file(file_info):
"""Checks to see if the file is a visible file."""
return not is_dir(file_info) and not is_hidden(file_info)
def get_user_home_uri():
"""Gets a URI pointing to the user's home directory '~'."""
return gnomevfs.URI(u'file://%s' % os.path.expanduser(u'~'))
def get_mime_type(uri):
"""Gets the mime type of given file uri"""
return gnomevfs.get_mime_type(uri)
def get_path_from_uri(uri):
return gnomevfs.get_local_path_from_uri(uri)
def get_last_modification(uri):
"""Gigen a file uri return the last modification date"""
file_info = get_file_info(uri)
if file_info:
return datetime.fromtimestamp(file_info.mtime)<|fim▁end|> | if file_info is not None:
return file_info.type != gnomevfs.FILE_TYPE_DIRECTORY
return False |