Dataset schema:
  max_stars_count: int64, range 301 to 224k
  text: string, lengths 6 to 1.05M
  token_count: int64, range 3 to 727k
max_stars_count: 353
package org.nutz.shiro.biz.acl;

import org.nutz.shiro.bean.UserRole;
import org.nutz.shiro.biz.BaseService;

/**
 * @author kerbores
 * @email <EMAIL>
 */
public class UserRoleService extends BaseService<UserRole> {
}
token_count: 89
max_stars_count: 388
//
//  CardboardDeviceParams.h
//  CardboardSDK-iOS
//

#ifndef __CardboardSDK_iOS__CardboardDeviceParams__
#define __CardboardSDK_iOS__CardboardDeviceParams__

#import <Foundation/Foundation.h>

namespace CardboardSDK
{

class Distortion;
class FieldOfView;

class CardboardDeviceParams
{
  public:
    CardboardDeviceParams();
    CardboardDeviceParams(CardboardDeviceParams* params);
    ~CardboardDeviceParams();

    NSString *vendor();
    NSString *model();

    float interLensDistance();
    float verticalDistanceToLensCenter();
    float screenToLensDistance();

    FieldOfView *maximumLeftEyeFOV();
    Distortion *distortion();

    bool equals(CardboardDeviceParams *other);

  private:
    NSString *_vendor;
    NSString *_model;
    NSString *_version;

    float _interLensDistance;
    float _verticalDistanceToLensCenter;
    float _screenToLensDistance;

    FieldOfView *_maximumLeftEyeFOV;
    Distortion *_distortion;
};

}

#endif
token_count: 372
max_stars_count: 459
#include <GL/glew.h>
#include <GLFW/glfw3.h>

#include "program_shader.h"

// Standard-library headers for std::ifstream, std::string and std::make_unique used below.
#include <fstream>
#include <memory>
#include <string>

void ProgramShader::Use() const
{
    program.Use();
}

void ProgramShader::Link()
{
    program.Link();
}

inline std::string SourceFromFile(const std::string &filepath)
{
    std::ifstream file(filepath);
    std::string result((std::istreambuf_iterator<char>(file)),
                       std::istreambuf_iterator<char>());
    file.close();
    return result;
}

void ProgramShader::AttachShader(oglplus::ShaderType type, const std::string &filepath)
{
    const auto &source = SourceFromFile(filepath);
    auto shader = std::make_unique<oglplus::Shader>(type, source);
    shader->Compile();
    program.AttachShader(*shader);
    shaders.push_back(move(shader));
}

const oglplus::Program &ProgramShader::Program() const
{
    return program;
}
token_count: 354
max_stars_count: 439
<reponame>huayl/heisenberg
/**
 * Baidu.com,Inc.
 * Copyright (c) 2000-2013 All Rights Reserved.
 */
package com.baidu.hsb.parser.ast.expression.primary.function.encryption;

import java.util.List;

import com.baidu.hsb.parser.ast.expression.Expression;
import com.baidu.hsb.parser.ast.expression.primary.function.FunctionExpression;

/**
 * @author <EMAIL>
 */
public class DesDecrypt extends FunctionExpression {

    public DesDecrypt(List<Expression> arguments) {
        super("DES_DECRYPT", arguments);
    }

    @Override
    public FunctionExpression constructFunction(List<Expression> arguments) {
        return new DesDecrypt(arguments);
    }
}
token_count: 233
max_stars_count: 485
<filename>PKShortVideoDemo/PKShortVideoItem2.h
//
//  PKShortVideoItem2.h
//  PKShortVideo
//
//  Created by TYM01 on 16/9/26.
//  Copyright © 2016年 pepsikirk. All rights reserved.
//

#import "JSQMediaItem.h"

@interface PKShortVideoItem2 : JSQMediaItem

@property (nonatomic, strong) NSString *videoPath;
@property (strong, nonatomic) UIImage *image;

- (instancetype)initWithVideoPath:(NSString *)videoPath previewImage:(UIImage *)image;

- (void)play;
- (void)pause;

@end
token_count: 173
max_stars_count: 1,755
/*=========================================================================

  Program:   Visualization Toolkit
  Module:    vtkGraphItem.h

  Copyright (c) <NAME>, <NAME>, <NAME>
  All rights reserved.
  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.

     This software is distributed WITHOUT ANY WARRANTY; without even
     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
     PURPOSE.  See the above copyright notice for more information.

=========================================================================*/
/**
 * @class   vtkGraphItem
 * @brief   a vtkContextItem that draws a block (optional label).
 *
 * This is a vtkContextItem that can be placed into a vtkContextScene. It draws
 * a block of the given dimensions, and reacts to mouse events.
 */

#ifndef vtkGraphItem_h
#define vtkGraphItem_h

#include "vtkContextItem.h"

class vtkContext2D;
class vtkGraph;

class vtkGraphItem : public vtkContextItem
{
public:
  vtkTypeMacro(vtkGraphItem, vtkContextItem);
  void PrintSelf(ostream& os, vtkIndent indent) override;

  static vtkGraphItem* New();

  vtkGetObjectMacro(Graph, vtkGraph);
  virtual void SetGraph(vtkGraph* g);

  /**
   * Paint event for the item.
   */
  bool Paint(vtkContext2D* painter) override;

  /**
   * Returns true if the supplied x, y coordinate is inside the item.
   */
  bool Hit(const vtkContextMouseEvent& mouse) override;

  /**
   * Mouse enter event.
   */
  bool MouseEnterEvent(const vtkContextMouseEvent& mouse) override;

  /**
   * Mouse move event.
   */
  bool MouseMoveEvent(const vtkContextMouseEvent& mouse) override;

  /**
   * Mouse leave event.
   */
  bool MouseLeaveEvent(const vtkContextMouseEvent& mouse) override;

  /**
   * Mouse button down event.
   */
  bool MouseButtonPressEvent(const vtkContextMouseEvent& mouse) override;

  /**
   * Mouse button release event.
   */
  bool MouseButtonReleaseEvent(const vtkContextMouseEvent& mouse) override;

  void UpdatePositions();

protected:
  vtkGraphItem();
  ~vtkGraphItem() override;

  float LastPosition[2];

  bool MouseOver;
  int MouseButtonPressed;

  vtkGraph* Graph;
  vtkIdType HitVertex;

  class Implementation;
  Implementation* Impl;

private:
  vtkGraphItem(const vtkGraphItem&) = delete;
  void operator=(const vtkGraphItem&) = delete;
};

#endif // vtkGraphItem_h
token_count: 720
max_stars_count: 317
#ifndef __AIForeignObject__ #define __AIForeignObject__ /* * Name: AIForeignObject.h * Purpose: * * ADOBE SYSTEMS INCORPORATED * Copyright 2002-2007 Adobe Systems Incorporated. * All rights reserved. * * NOTICE: Adobe permits you to use, modify, and distribute this file * in accordance with the terms of the Adobe license agreement * accompanying it. If you have received this file from a source other * than Adobe, then your use, modification, or distribution of it * requires the prior written permission of Adobe. * */ /*******************************************************************************/ // Imports #ifndef __AITypes__ #include "AITypes.h" #endif #include "AIHeaderBegin.h" /** @file AIForeignObject.h */ /*******************************************************************************/ // Constants /** Foreign Object Suite name */ #define kAIForeignObjectSuite "AI Foreign Object Suite" /** Foreign Object Suite version */ #define kAIForeignObjectSuiteVersion4 AIAPI_VERSION(4) /** Foreign Object Suite version */ #define kAIForeignObjectSuiteVersion kAIForeignObjectSuiteVersion4 /** Foreign Object Suite version */ #define kAIForeignObjectVersion kAIForeignObjectSuiteVersion /*******************************************************************************/ // Types /** Wrap this \c AGMDisplayListPort* as a \c CAGMDisplayListPort or some baseclass in order to use it. */ typedef struct _AIDisplayPort *AIDisplayPortHandle; /*******************************************************************************/ // Suite /** @ingroup Suites This suite allows you to work with \e foreign \e objects, that is, art objects that encapsulate imaging constructs which are supported by PDF and the Adobe Imaging Model, but are not native to Illustrator. Foreign objects behave like placed objects in that they can be positioned on the page, saved, displayed on the screen, and printed. They do not, however, respond to other Illustrator editing commands. A foreign object can use global resources that are used by other artwork elements, such as fonts or spot colors. Illustrator requires that all spot colors used in a document be installed into the swatch list. Each spot color must have a unique definition and spot colors used by foreign objects are locked against having their definition edited. The installation and locking of spot colors needs to be done once the foreign object has been created and is known to be a permanent result of an operation on the document, as opposed to a temporary object created during an operation. For this reason installation of spot colors is done when an editing operation is committed. When spot colors are installed, their definitions can conflict with existing spot colors. in which case the user is prompted to resolve the conflict. @see \c #AIFOConversionSuite for functions that expand foreign objects to native Illustrator artwork. \li Acquire this suite using \c #SPBasicSuite::AcquireSuite() with the constants \c #kAIForeignObjectSuite and \c #kAIForeignObjectVersion. */ typedef struct AIForeignObjectSuite { /** Creates a new, empty foreign object. Pass this object to \c #GetDisplayPort(), and draw into that port to fill the object. @param paintOrder The paint order position of the new art, relative to the \c prep object. See \c AITypes::AIPaintOrder. @param prep The prepositional art object.See \c #AIArtSuite::NewArt(). @param copyObjects When true, all objects drawn into the display list are deep copied. 
Set to false for maximum efficiency, but must be true if the objects depend on resources that will go away after the display list has been created. @param newArt [out] A buffer in which to return the new artwork. */ AIAPI AIErr (*New) (ai::int16 paintOrder, AIArtHandle prep, AIBoolean copyObjects, AIArtHandle *newArt); /** Reports whether an art object is a foreign object. (Note that this function returns a boolean value, not an error code.) @param art The art object. @return True if the art is a foreign object. */ AIAPI AIBoolean (*Is) (AIArtHandle art); /** Retrieves the bounds of a foreign object (that is, the bounds of the display list contents before application of a transformation matrix, @param art The foreign object @param bounds [out] A buffer in which to return the bounding box. */ AIAPI AIErr (*GetBounds) ( AIArtHandle art, AIRealRect *bounds ); /** Sets the bounds of the foreign object (that is, the bounds of the display list contents before application of a transformation matrix, @param art The foreign object @param bounds The bounding box. */ AIAPI AIErr (*SetBounds) ( AIArtHandle art, AIRealRect *bounds ); /** Retrieves the transformation matrix of a foreign object, which transforms display list coordinates to artboard coordinates. @param art The foreign object @param matrix [out] A buffer in which to return the transformation matrix. */ AIAPI AIErr (*GetMatrix) (AIArtHandle art, AIRealMatrix *matrix); /** Sets the transformation matrix of a foreign object, which transforms display list coordinates to artboard coordinates. @param art The foreign object @param matrix The transformation matrix. */ AIAPI AIErr (*SetMatrix) (AIArtHandle art, AIRealMatrix *matrix); /** Retrieves the AGM display port of a foreign object, Pass a new, empty foreign object, and draw into the port. The drawing commands are recorded by the display port and are played back whenever the foreign object is rendered. One way to draw into the display port is to set it as the \c portV6 member of the \c #AIDrawArtData structure and use \c #AIDrawArtSuite::DrawArt(). If you have access to the AGM interface, you can draw directly into the display list or play its contents to another AGM port. @param art The foreign object, as returned by \c #New(). @param port [out] A buffer in which to return the display port. */ AIAPI AIErr (*GetDisplayPort)(AIArtHandle art, AIDisplayPortHandle *port); /** Not implemented. a no-op. */ AIAPI AIErr (*InstallResources)(AIArtHandle art); /** Duplicates a foreign object, stripping off any attributes that affect the rendering of the contents of the object, other than the transformation matrix. Used with the \c #kAIFOConversionSuppressAttributes flag in the \c #AIFOConversionSuite. @param art The foreign object. @param paintOrder The paint order position of the new art, relative to the \c prep object. See \c AITypes::AIPaintOrder. @param prep The prepositional art object.See \c #AIArtSuite::NewArt(). @param newArt [out] A buffer in which to return the new artwork. */ AIAPI AIErr (*DuplicateForConversion)(AIArtHandle art, ai::int16 paintOrder, AIArtHandle prep, AIArtHandle* newArt); } AIForeignObjectSuite; #include "AIHeaderEnd.h" #endif
token_count: 1,937
max_stars_count: 640
<reponame>jpoikela/z88dk

int func()
{
    far char *ptr;
    return *ptr;
}

int func2()
{
    far char *ptr;
    return *ptr++;
}

int func3()
{
    far char *ptr;
    return *++ptr;
}

int func4()
{
    far char *ptr;
    return ptr[3];
}

int func5(far char *ptr, char val)
{
    *ptr = val;
    func5(ptr, 1);
}

int func6()
{
    char *ptr;
    func5(ptr, 1);
}

struct x {
    int y;
    char buf[10];
    int z;
};

void func7()
{
    far struct x *ptr;
    ptr->z = 1;
}
token_count: 225
max_stars_count: 348
{"nom":"Nauroy","circ":"2ème circonscription","dpt":"Aisne","inscrits":523,"abs":352,"votants":171,"blancs":2,"nuls":8,"exp":161,"res":[{"nuance":"LR","nom":"<NAME>","voix":110},{"nuance":"FN","nom":"Mme <NAME>","voix":51}]}
token_count: 91
max_stars_count: 559
<filename>examples/kancolle/src/main/java/kancolle/entityauth/KanmusuAuthenticationData.java<gh_stars>100-1000 /** * Copyright (c) 2014-2017 Netflix, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kancolle.entityauth; import kancolle.KanColleMslError; import com.netflix.msl.MslEncodingException; import com.netflix.msl.MslEntityAuthException; import com.netflix.msl.MslError; import com.netflix.msl.entityauth.EntityAuthenticationData; import com.netflix.msl.io.MslEncoderException; import com.netflix.msl.io.MslEncoderFactory; import com.netflix.msl.io.MslEncoderFormat; import com.netflix.msl.io.MslObject; /** * <p>Each Kanmusu ship is identified by a type and name. The unique identity * of a ship is equal to the type and name concatenated in that order by a * single colon ":" character.</p> * * <p> * {@code { * "#mandatory" : [ "type", "name" ], * "type" : "string", * "name" : "name", * }} where: * <ul> * <li>{@code type} is the ship type</li> * <li>{@code name} is the ship name</li> * </ul></p> * * @author <NAME> <<EMAIL>> */ public class KanmusuAuthenticationData extends EntityAuthenticationData { /** Key ship type. */ private static final String KEY_TYPE = "type"; /** Key ship name. */ private static final String KEY_NAME = "name"; /** Colon character. */ private static final String CHAR_COLON = ":"; /** * Construct a new Kanmusu authentication data instance with the specified * type and name. Colons are not permitted in the type or name. * * @param type the ship type. * @param name the ship name. * @throws IllegalArgumentException if the type or name contains a colon. */ public KanmusuAuthenticationData(final String type, final String name) { super(KanColleEntityAuthenticationScheme.KANMUSU); // Colons are not permitted in the type or name. if (type.contains(CHAR_COLON) || name.contains(CHAR_COLON)) throw new IllegalArgumentException("Colons are not permitted in the type [" + type + "] or name [" + name + "]."); this.type = type; this.name = name; } /** * Construct a new Kanmusu authentication data instance from the provided * identity. * * @param identity the ship identity. * @throws IllegalArgumentException if the identity does not consist of a * type and name separated by a single colon. * @see #getIdentity() */ public KanmusuAuthenticationData(final String identity) { super(KanColleEntityAuthenticationScheme.KANMUSU); // Split on the colon. final String[] parts = identity.split(CHAR_COLON); if (parts.length != 2) throw new IllegalArgumentException("Identity must consist of a type and name separated by a single colon."); this.type = parts[0]; this.name = parts[1]; } /** * Construct a new Kanmusu authentication data instance from the provided * MSL object. * * @param kanmusuMo the authentication data MSL object. * @throws MslEncodingException if there is an error parsing the entity * authentication data. * @throws MslEntityAuthException if the type or name includes a colon. 
*/ public KanmusuAuthenticationData(final MslObject kanmusuMo) throws MslEncodingException, MslEntityAuthException { super(KanColleEntityAuthenticationScheme.KANMUSU); try { type = kanmusuMo.getString(KEY_TYPE); name = kanmusuMo.getString(KEY_NAME); } catch (final MslEncoderException e) { throw new MslEncodingException(MslError.MSL_PARSE_ERROR, "kanmusu authdata " + kanmusuMo.toString(), e); } // Colons are not permitted in the type or name. if (type.contains(CHAR_COLON) || name.contains(CHAR_COLON)) throw new MslEntityAuthException(KanColleMslError.KANMUSU_ILLEGAL_IDENTITY, "kanmusu authdata " + kanmusuMo.toString()); } /* (non-Javadoc) * @see com.netflix.msl.entityauth.EntityAuthenticationData#getIdentity() */ @Override public String getIdentity() { return type + ":" + name; } /** * @return the ship type. */ public String getType() { return type; } /** * @return the ship name. */ public String getName() { return name; } /* (non-Javadoc) * @see com.netflix.msl.entityauth.EntityAuthenticationData#getAuthData(com.netflix.msl.io.MslEncoderFactory, com.netflix.msl.io.MslEncoderFormat) */ @Override public MslObject getAuthData(final MslEncoderFactory encoder, final MslEncoderFormat format) throws MslEncoderException { final MslObject mo = encoder.createObject(); mo.put(KEY_TYPE, type); mo.put(KEY_NAME, name); return mo; } /** Ship type. */ private final String type; /** Ship name. */ private final String name; }
token_count: 2,086
max_stars_count: 320
<filename>tantum/libs/fmix/datasets/google_commands/google_commands.py """Google speech commands dataset.""" __author__ = '<NAME>' import os import numpy as np from torch.utils.data import Dataset __all__ = [ 'CLASSES', 'SpeechCommandsDataset', 'BackgroundNoiseDataset' ] CLASSES = 'unknown, silence, yes, no, up, down, left, right, on, off, stop, go'.split(', ') class SpeechCommandsDataset(Dataset): """Google speech commands dataset. Only 'yes', 'no', 'up', 'down', 'left', 'right', 'on', 'off', 'stop' and 'go' are treated as known classes. All other classes are used as 'unknown' samples. See for more information: https://www.kaggle.com/c/tensorflow-speech-recognition-challenge """ def __init__(self, folder, transform=None, classes=CLASSES, silence_percentage=0.1): try: import librosa except: raise ModuleNotFoundError('Librosa package is reuiqred for google commands experiments. Try pip install') all_classes = [d for d in os.listdir(folder) if os.path.isdir(os.path.join(folder, d)) and not d.startswith('_')] #for c in classes[2:]: # assert c in all_classes class_to_idx = {classes[i]: i for i in range(len(classes))} for c in all_classes: if c not in class_to_idx: class_to_idx[c] = 0 data = [] for c in all_classes: d = os.path.join(folder, c) target = class_to_idx[c] for f in os.listdir(d): path = os.path.join(d, f) data.append((path, target)) # add silence target = class_to_idx['silence'] data += [('', target)] * int(len(data) * silence_percentage) self.classes = classes self.data = data self.transform = transform def __len__(self): return len(self.data) def __getitem__(self, index): path, target = self.data[index] data = {'path': path, 'target': target} if self.transform is not None: data = self.transform(data) return data, target def make_weights_for_balanced_classes(self): """adopted from https://discuss.pytorch.org/t/balanced-sampling-between-classes-with-torchvision-dataloader/2703/3""" nclasses = len(self.classes) count = np.zeros(nclasses) for item in self.data: count[item[1]] += 1 N = float(sum(count)) weight_per_class = N / count weight = np.zeros(len(self)) for idx, item in enumerate(self.data): weight[idx] = weight_per_class[item[1]] return weight class BackgroundNoiseDataset(Dataset): """Dataset for silence / background noise.""" def __init__(self, folder, transform=None, sample_rate=16000, sample_length=1): try: import librosa except: raise ModuleNotFoundError('Librosa package is reuiqred for google commands experiments. Try pip install') audio_files = [d for d in os.listdir(folder) if os.path.isfile(os.path.join(folder, d)) and d.endswith('.wav')] samples = [] for f in audio_files: path = os.path.join(folder, f) s, sr = librosa.load(path, sample_rate) samples.append(s) samples = np.hstack(samples) c = int(sample_rate * sample_length) r = len(samples) // c self.samples = samples[:r*c].reshape(-1, c) self.sample_rate = sample_rate self.classes = CLASSES self.transform = transform self.path = folder def __len__(self): return len(self.samples) def __getitem__(self, index): data = {'samples': self.samples[index], 'sample_rate': self.sample_rate, 'target': 1, 'path': self.path} if self.transform is not None: data = self.transform(data) return data
token_count: 1,709
max_stars_count: 1,091
{ "comments": [ "Demo of adding badges to devices and hosts" ], "title": "Demo adding badges", "params": { "lastAuto": 12 }, "description": [ "Demonstrate the device/host badging feature." ] }
token_count: 81
max_stars_count: 673
""" block_replacements """ from __future__ import absolute_import, division, print_function, unicode_literals import logging from PySide import QtCore, QtGui from mcedit2.ui.block_replacements import Ui_BlockReplacements from mcedit2.util.resources import resourcePath from mcedit2.widgets.blockpicker import BlockTypeButton from mcedit2.widgets.layout import Row, Column log = logging.getLogger(__name__) class BlockReplacementButton(QtGui.QWidget): def __init__(self, parent=None): super(BlockReplacementButton, self).__init__() self.replacementList = BlockReplacementList() self.replacementDialog = QtGui.QDialog() self.replacementOk = QtGui.QPushButton(self.tr("OK")) self.replacementOk.clicked.connect(self.replacementDialog.accept) self.replacementDialog.setWindowTitle(self.tr("Choose blocks to replace")) self.replacementDialog.setLayout(Column(self.replacementList, Row(None, self.replacementOk))) self.oldBlockButton = BlockTypeButton(multipleSelect=True) self.newBlockButton = BlockTypeButton() self.advancedButton = QtGui.QPushButton(self.tr("Advanced...")) self.simpleButton = QtGui.QPushButton(self.tr("No, simple!")) self.simpleButton.setVisible(False) self.simpleButton.clicked.connect(self.goSimple) self.setLayout(Column(self.oldBlockButton, self.newBlockButton, self.advancedButton, self.simpleButton, margin=0)) self.oldBlockButton.blocksChanged.connect(self.simpleBlocksChanged) self.newBlockButton.blocksChanged.connect(self.simpleBlocksChanged) self.advancedButton.clicked.connect(self.displayDialog) replacementsChanged = QtCore.Signal() _editorSession = None @property def editorSession(self): return self._editorSession @editorSession.setter def editorSession(self, session): self._editorSession = session self.oldBlockButton.editorSession = session self.newBlockButton.editorSession = session self.replacementList.editorSession = session def displayDialog(self): self.replacementDialog.exec_() replacements = self.replacementList.getReplacements() if len(replacements) == 0: self.oldBlockButton.blocks = [] self.newBlockButton.blocks = [] elif len(replacements) == 1: old, new = replacements[0] self.oldBlockButton.blocks = old self.newBlockButton.block = new if len(replacements) > 1: self.oldBlockButton.blocks = [] self.newBlockButton.blocks = [] self.oldBlockButton.setEnabled(False) self.newBlockButton.setEnabled(False) self.simpleButton.setVisible(True) else: self.oldBlockButton.setEnabled(True) self.newBlockButton.setEnabled(True) self.simpleButton.setVisible(False) self.replacementsChanged.emit() def goSimple(self): self.oldBlockButton.blocks = [] self.newBlockButton.blocks = [] self.simpleButton.setVisible(False) def simpleBlocksChanged(self): old = self.oldBlockButton.blocks new = self.newBlockButton.block if new is not None: replacements = [(old, new)] else: replacements = [] log.info("Replacements button: %s", replacements) self.replacementList.setReplacements(replacements) self.replacementsChanged.emit() def getReplacements(self): return self.replacementList.getReplacements() class BlockReplacementList(QtGui.QWidget, Ui_BlockReplacements): def __init__(self, parent=None): super(BlockReplacementList, self).__init__(parent) self.setupUi(self) header = self.findReplaceTable.horizontalHeader() header.setResizeMode(0, QtGui.QHeaderView.Stretch) header.setResizeMode(1, QtGui.QHeaderView.Stretch) self.editorSession = None self.clearTable() def clearTable(self): addButton = QtGui.QPushButton("Add...", flat=True, clicked=self.addNewRow) 
addButton.setIcon(QtGui.QIcon(resourcePath("mcedit2/assets/mcedit2/icons/add.png"))) addButton.setMinimumHeight(48) addButton.setIconSize(QtCore.QSize(32, 32)) addItem = QtGui.QTableWidgetItem(text="Add...") addItem.setSizeHint(addButton.sizeHint()) self.findReplaceTable.clear() self.findReplaceTable.setRowCount(1) self.findReplaceTable.setItem(0, 0, addItem) self.findReplaceTable.setSpan(0, 0, 1, 2) self.findReplaceTable.setCellWidget(0, 0, addButton) self.findReplaceTable.resizeRowsToContents() self.findReplaceTable.resizeColumnsToContents() @property def blocktypes(self): return self.editorSession.worldEditor.blocktypes if self.editorSession else None def addNewRow(self): self.addRow([], self.blocktypes["air"]) def addRow(self, oldBlocks, newBlock): assert self.editorSession is not None, "Must set BlockReplacementList.editorSession before using" row = self.findReplaceTable.rowCount() - 1 self.findReplaceTable.insertRow(row) log.info("Row inserted") left = QtGui.QTableWidgetItem() right = QtGui.QTableWidgetItem() log.info("Items created") def frameButton(button, withRemove=False): frame = QtGui.QFrame() frame.button = button layout = QtGui.QVBoxLayout() layout.addStretch(1) if withRemove: removeButton = QtGui.QPushButton("", flat=True) removeButton.setIcon(QtGui.QIcon(resourcePath("mcedit2/assets/mcedit2/icons/remove.png"))) removeButton.setIconSize(QtCore.QSize(24, 24)) def _clicked(): self.removeRow(self.findReplaceTable.row(left)) removeButton.__clicked = _clicked removeButton.clicked.connect(_clicked) layout.addLayout(Row((button, 1), removeButton)) else: layout.addWidget(button) layout.addStretch(1) frame.setLayout(layout) return frame leftButton = BlockTypeButton(flat=True, multipleSelect=True) leftButton.editorSession = self.editorSession leftButton.blocks = oldBlocks leftFramedButton = frameButton(leftButton) left.setSizeHint(leftFramedButton.sizeHint()) log.info("Left button") rightButton = BlockTypeButton(flat=True) rightButton.editorSession = self.editorSession rightButton.block = newBlock rightFramedButton = frameButton(rightButton, True) right.setSizeHint(rightFramedButton.sizeHint()) log.info("Right button") self.findReplaceTable.setItem(row, 0, left) self.findReplaceTable.setItem(row, 1, right) self.findReplaceTable.setCellWidget(row, 0, leftFramedButton) self.findReplaceTable.setCellWidget(row, 1, rightFramedButton) self.findReplaceTable.resizeRowsToContents() #self.findReplaceTable.resizeColumnsToContents() log.info("Done") def removeRow(self, row): self.findReplaceTable.removeRow(row) def getReplacements(self): def _get(): for row in range(self.findReplaceTable.rowCount()-1): left = self.findReplaceTable.cellWidget(row, 0).button right = self.findReplaceTable.cellWidget(row, 1).button yield left.blocks, right.block return list(_get()) def setReplacements(self, replacements): if replacements == self.getReplacements(): return self.clearTable() for old, new in replacements: self.addRow(old, new)
token_count: 3,451
max_stars_count: 2,023
<filename>recipes/Python/286209_set_class_mutable_objects_unique_hashable_id/recipe-286209.py class KeyedSet(dict): """ A set class for handling collections of arbitrary objects that have unique, and hashable 'id' attributes. Set items are stored as values in a dictionary, with ids as keys. There is no requirement for set items to be hashable. The class requires a 1 to 1 mapping between objects and their ids, and is designed for cases where access to items via a key lookup is also desirable. """ def __init__(self, items=None): if items is not None: for item in items: self[item.id] = item def add(self, item): self[item.id] = item def remove(self, item): del self[item.id] def __contains__(self, item): try: return self.has_key(item.id) except AttributeError: return False def __iter__(self): return self.itervalues() def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self.keys()) def __cmp__(self, other): raise TypeError, "can't compare KeyedSets using cmp()" def issubset(self, other): self._binary_check(other) if len(self) > len(other): return False else: for key in self.iterkeys(): if not other.has_key(key): return False return True def issuperset(self, other): self._binary_check(other) return other.issubset(self) __le__ = issubset __ge__ = issuperset def __lt__(self, other): self._binary_check(other) return len(self) < len(other) and self.issubset(other) def __gt__(self, other): self._binary_check(other) return len(self) > len(other) and self.issuperset(other) def __eq__(self, other): if isinstance(other, self.__class__): return len(self) == len(other) and self.issubset(other) else: return False def __ne__(self, other): if isinstance(other, self.__class__): return not self == other else: return True def union(self, other): res = self.copy() for item in other: res.add(item) return res def intersection(self, other): res = self.__class__() if not isinstance(other, self.__class__): other = self.__class__(other) if len(self) > len(other): for item in other: if item in self: res.add(item) else: for item in self: if item in other: res.add(item) return res def difference(self, other): res = self.copy() for item in other: if item in res: res.remove(item) return res def symmetric_difference(self, other): res = self.copy() if not isinstance(other, self.__class__): other = self.__class__(other) for item in other: if item in self: res.remove(item) else: res.add(item) return res def __or__(self, other): self._binary_check(other) return self.union(other) def __and__(self, other): self._binary_check(other) return self.intersection(other) def __sub__(self, other): self._binary_check(other) return self.difference(other) def __xor__(self, other): self._binary_check(other) return self.symmetric_difference(other) def _binary_check(self, other): if not isinstance(other, self.__class__): raise TypeError, "Binary operation only permitted between KeyedSets" def copy(self): res = self.__class__() res.update(self) return res def union_update(self, other): if isinstance(other, (self.__class__, dict)): self.update(other) else: for item in other: self.add(item) def intersection_update(self, other): if not isinstance(other, self.__class__): other = self.__class__(other) self &= other def difference_update(self, other): for item in other: self.discard(item) def symmetric_difference_update(self, other): if not isinstance(other, self.__class__): other = self.__class__(other) for item in other: if item in self: self.remove(item) else: self.add(item) def __ior__(self, other): self._binary_check(other) 
self.union_update(other) return self def __iand__(self, other): self._binary_check(other) intersect = self & other self.clear() self.update(intersect) return self def __isub__(self, other): self._binary_check(other) self.difference_update(other) return self def __ixor__(self, other): self._binary_check(other) self.symmetric_difference_update(other) return self def discard(self, item): try: self.remove(item) except KeyError: pass def pop(self, *args): if args: return super(self.__class__, self).pop(*args) else: return self.popitem()[1] def update(self, other): if isinstance(other, (self.__class__, dict)): super(self.__class__, self).update(other) else: for item in other: self.add(item)
token_count: 2,692
max_stars_count: 600
//----------------------------------------------------------------------------- // File: XBResource.cpp // // Desc: Loads resources from an XPR (Xbox Packed Resource) file. // // Hist: 03.12.01 - New for April XDK release // // Copyright (c) Microsoft Corporation. All rights reserved. //----------------------------------------------------------------------------- #include <xtl.h> #include <xgraphics.h> #include <stdio.h> #include "XBUtil.h" #include "XBResource.h" //----------------------------------------------------------------------------- // Name: XBResource_SizeOf() // Desc: Determines the byte size of a D3DResource //----------------------------------------------------------------------------- DWORD XBResource_SizeOf( LPDIRECT3DRESOURCE8 pResource ) { switch( pResource->GetType() ) { case D3DRTYPE_TEXTURE: return sizeof(D3DTexture); case D3DRTYPE_VOLUMETEXTURE: return sizeof(D3DVolumeTexture); case D3DRTYPE_CUBETEXTURE: return sizeof(D3DCubeTexture); case D3DRTYPE_VERTEXBUFFER: return sizeof(D3DVertexBuffer); case D3DRTYPE_INDEXBUFFER: return sizeof(D3DIndexBuffer); case D3DRTYPE_PALETTE: return sizeof(D3DPalette); } return 0; } //----------------------------------------------------------------------------- // Name: CXBPackedResource() // Desc: Constructor //----------------------------------------------------------------------------- CXBPackedResource::CXBPackedResource() { m_pSysMemData = NULL; m_pVidMemData = NULL; m_dwNumResources = 0L; m_pResourceTags = NULL; } //----------------------------------------------------------------------------- // Name: ~CXBPackedResource() // Desc: Destructor //----------------------------------------------------------------------------- CXBPackedResource::~CXBPackedResource() { Destroy(); } //----------------------------------------------------------------------------- // Name: GetData() // Desc: Loads all the texture resources from the given XPR. //----------------------------------------------------------------------------- VOID* CXBPackedResource::GetData( const CHAR* strName ) const { if( NULL==m_pResourceTags || NULL==strName ) return NULL; for( DWORD i=0; i<m_dwNumResources; i++ ) { if( !_stricmp( strName, m_pResourceTags[i].strName ) ) return &m_pSysMemData[m_pResourceTags[i].dwOffset]; } return NULL; } //----------------------------------------------------------------------------- // Name: Create() // Desc: Loads all the texture resources from the given XPR. 
//----------------------------------------------------------------------------- HRESULT CXBPackedResource::Create( LPDIRECT3DDEVICE8 pd3dDevice, const CHAR* strFilename, DWORD dwNumResources, XBRESOURCE* pResourceTags ) { // Find the media file CHAR strResourcePath[512]; if( FAILED( XBUtil_FindMediaFile( strResourcePath, strFilename ) ) ) return E_FAIL; // Open the file HANDLE hFile; DWORD dwNumBytesRead; hFile = CreateFile(strResourcePath, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_READONLY, NULL); if(hFile == INVALID_HANDLE_VALUE) { OUTPUT_DEBUG_STRING( "CXBPackedResource::Create(): ERROR: File not found!\n" ); return E_FAIL; } // Read in and verify the XPR magic header XPR_HEADER xprh; ReadFile(hFile, &xprh, sizeof(XPR_HEADER), &dwNumBytesRead, NULL); if( xprh.dwMagic != XPR_MAGIC_VALUE ) { OUTPUT_DEBUG_STRING( "Invalid Xbox Packed Resource (.xpr) file" ); return E_INVALIDARG; } // Compute memory requirements DWORD dwSysMemDataSize = xprh.dwHeaderSize - sizeof(XPR_HEADER); DWORD dwVidMemDataSize = xprh.dwTotalSize - xprh.dwHeaderSize; // Allocate memory m_pSysMemData = new BYTE[dwSysMemDataSize]; m_pVidMemData = (BYTE*)D3D_AllocContiguousMemory( dwVidMemDataSize, D3DTEXTURE_ALIGNMENT ); // Read in the data from the file ReadFile(hFile, m_pSysMemData, dwSysMemDataSize, &dwNumBytesRead, NULL); ReadFile(hFile, m_pVidMemData, dwVidMemDataSize, &dwNumBytesRead, NULL); // Done with the file CloseHandle(hFile); // Loop over resources, calling Register() BYTE* pData = m_pSysMemData; for( DWORD i = 0; i < dwNumResources; i++ ) { // Check for user data if( *((DWORD*)pData) & 0x80000000 ) { DWORD dwType = ((DWORD*)pData)[0]; DWORD dwSize = ((DWORD*)pData)[1]; pData += sizeof(DWORD) * 2; (VOID)dwType; // not used pData += dwSize; } else { // Get the resource LPDIRECT3DRESOURCE8 pResource = (LPDIRECT3DRESOURCE8)pData; // Register the resource pResource->Register( m_pVidMemData ); // Advance the pointer pData += XBResource_SizeOf( pResource ); } } // Finally, store number of resources and the resource tags m_dwNumResources = dwNumResources; m_pResourceTags = pResourceTags; return S_OK; } //----------------------------------------------------------------------------- // Name: Destroy() // Desc: Tears down the packed resource data //----------------------------------------------------------------------------- VOID CXBPackedResource::Destroy() { if( m_pSysMemData != NULL ) { delete[] m_pSysMemData; m_pSysMemData = NULL; } if( m_pVidMemData != NULL ) { D3D_FreeContiguousMemory( m_pVidMemData ); m_pVidMemData = NULL; } m_dwNumResources = 0L; m_pResourceTags = NULL; }
token_count: 2,184
max_stars_count: 537
from __future__ import print_function

import argparse
import os
import sys

import h5py
import numpy as np

from molecules.model import MoleculeVAE
from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, \
    decode_smiles_from_indexes, load_dataset

from pylab import figure, axes, scatter, title, show

from rdkit import Chem
from rdkit.Chem import Draw

LATENT_DIM = 292
TARGET = 'autoencoder'

def get_arguments():
    parser = argparse.ArgumentParser(description='Molecular autoencoder network')
    parser.add_argument('data', type=str, help='File of latent representation tensors for decoding.')
    parser.add_argument('model', type=str, help='Trained Keras model to use.')
    parser.add_argument('--save_h5', type=str, help='Name of a file to write HDF5 output to.')
    parser.add_argument('--target', type=str, default=TARGET,
                        help='What model to sample from: autoencoder, encoder, decoder.')
    parser.add_argument('--latent_dim', type=int, metavar='N', default=LATENT_DIM,
                        help='Dimensionality of the latent representation.')
    return parser.parse_args()

def read_latent_data(filename):
    h5f = h5py.File(filename, 'r')
    data = h5f['latent_vectors'][:]
    charset = h5f['charset'][:]
    h5f.close()
    return (data, charset)

def autoencoder(args, model):
    latent_dim = args.latent_dim
    data, charset = load_dataset(args.data, split = False)

    if os.path.isfile(args.model):
        model.load(charset, args.model, latent_rep_size = latent_dim)
    else:
        raise ValueError("Model file %s doesn't exist" % args.model)

    sampled = model.autoencoder.predict(data[0].reshape(1, 120, len(charset))).argmax(axis=2)[0]
    mol = decode_smiles_from_indexes(map(from_one_hot_array, data[0]), charset)
    sampled = decode_smiles_from_indexes(sampled, charset)
    print(mol)
    print(sampled)

def decoder(args, model):
    latent_dim = args.latent_dim
    data, charset = read_latent_data(args.data)

    if os.path.isfile(args.model):
        model.load(charset, args.model, latent_rep_size = latent_dim)
    else:
        raise ValueError("Model file %s doesn't exist" % args.model)

    sampled = model.decoder.predict(data[0].reshape(1, latent_dim)).argmax(axis=2)[0]
    sampled = decode_smiles_from_indexes(sampled, charset)
    print(sampled)

def encoder(args, model):
    latent_dim = args.latent_dim
    data, charset = load_dataset(args.data, split = False)

    if os.path.isfile(args.model):
        model.load(charset, args.model, latent_rep_size = latent_dim)
    else:
        raise ValueError("Model file %s doesn't exist" % args.model)

    x_latent = model.encoder.predict(data)

    if args.save_h5:
        h5f = h5py.File(args.save_h5, 'w')
        h5f.create_dataset('charset', data = charset)
        h5f.create_dataset('latent_vectors', data = x_latent)
        h5f.close()
    else:
        np.savetxt(sys.stdout, x_latent, delimiter = '\t')

def main():
    args = get_arguments()
    model = MoleculeVAE()

    if args.target == 'autoencoder':
        autoencoder(args, model)
    elif args.target == 'encoder':
        encoder(args, model)
    elif args.target == 'decoder':
        decoder(args, model)

if __name__ == '__main__':
    main()
token_count: 1,353
max_stars_count: 3,422
package com.volokh.danylo.visibility_utils.calculator;

import android.widget.AbsListView;

import com.volokh.danylo.visibility_utils.scroll_utils.ItemsPositionGetter;
import com.volokh.danylo.visibility_utils.scroll_utils.ScrollDirectionDetector;
import com.volokh.danylo.visibility_utils.utils.Config;
import com.volokh.danylo.visibility_utils.utils.Logger;

/**
 * This class encapsulates some basic logic of Visibility calculator.
 * In onScroll event it calculates Scroll direction using {@link com.volokh.danylo.visibility_utils.scroll_utils.ScrollDirectionDetector}
 * and then calls appropriate methods
 */
public abstract class BaseItemsVisibilityCalculator implements ListItemsVisibilityCalculator, ScrollDirectionDetector.OnDetectScrollListener {

    private static final boolean SHOW_LOGS = Config.SHOW_LOGS;
    private static final String TAG = BaseItemsVisibilityCalculator.class.getSimpleName();

    private final ScrollDirectionDetector mScrollDirectionDetector = new ScrollDirectionDetector(this);

    @Override
    public void onScroll(ItemsPositionGetter itemsPositionGetter, int firstVisibleItem, int visibleItemCount, int scrollState/*TODO: add current item here. start tracking from it*/) {
        if (SHOW_LOGS) Logger.v(TAG, ">> onScroll");
        if (SHOW_LOGS) Logger.v(TAG, "onScroll, firstVisibleItem " + firstVisibleItem + ", visibleItemCount " + visibleItemCount + ", scrollState " + scrollStateStr(scrollState));

        mScrollDirectionDetector.onDetectedListScroll(itemsPositionGetter, firstVisibleItem);

        switch (scrollState) {
            case AbsListView.OnScrollListener.SCROLL_STATE_TOUCH_SCROLL:
                onStateTouchScroll(itemsPositionGetter);
                break;

            case AbsListView.OnScrollListener.SCROLL_STATE_FLING:
                onStateTouchScroll(itemsPositionGetter);
                break;

            case AbsListView.OnScrollListener.SCROLL_STATE_IDLE:
                if (SHOW_LOGS) Logger.v(TAG, "onScroll, SCROLL_STATE_IDLE. ignoring");
                break;
        }
    }

    protected abstract void onStateFling(ItemsPositionGetter itemsPositionGetter);

    protected abstract void onStateTouchScroll(ItemsPositionGetter itemsPositionGetter);

    private String scrollStateStr(int scrollState) {
        switch (scrollState) {
            case AbsListView.OnScrollListener.SCROLL_STATE_FLING:
                return "SCROLL_STATE_FLING";
            case AbsListView.OnScrollListener.SCROLL_STATE_IDLE:
                return "SCROLL_STATE_IDLE";
            case AbsListView.OnScrollListener.SCROLL_STATE_TOUCH_SCROLL:
                return "SCROLL_STATE_TOUCH_SCROLL";
            default:
                throw new RuntimeException("wrong data, scrollState " + scrollState);
        }
    }
}
token_count: 1,070
max_stars_count: 3,301
<filename>shaded_libraries/shaded_flink_ai_extended_tf2/src/main/java/com/alibaba/alink/operator/batch/dl/ctr/protos/TowerOuterClass.java // Generated by the protocol buffer compiler. DO NOT EDIT! // source: easy_rec/python/protos/tower.proto package com.alibaba.alink.operator.batch.dl.ctr.protos; public final class TowerOuterClass { private TowerOuterClass() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } public interface TowerOrBuilder extends // @@protoc_insertion_point(interface_extends:protos.Tower) com.google.protobuf.MessageOrBuilder { /** * <code>required string input = 1;</code> */ boolean hasInput(); /** * <code>required string input = 1;</code> */ java.lang.String getInput(); /** * <code>required string input = 1;</code> */ com.google.protobuf.ByteString getInputBytes(); /** * <code>required .protos.DNN dnn = 2;</code> */ boolean hasDnn(); /** * <code>required .protos.DNN dnn = 2;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn(); /** * <code>required .protos.DNN dnn = 2;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder(); } /** * Protobuf type {@code protos.Tower} */ public static final class Tower extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:protos.Tower) TowerOrBuilder { private static final long serialVersionUID = 0L; // Use Tower.newBuilder() to construct. private Tower(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Tower() { input_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new Tower(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private Tower( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; input_ = bs; break; } case 18: { com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder subBuilder = null; if (((bitField0_ & 0x00000002) != 0)) { subBuilder = dnn_.toBuilder(); } dnn_ = input.readMessage(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(dnn_); dnn_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000002; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor 
getDescriptor() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_Tower_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_Tower_fieldAccessorTable .ensureFieldAccessorsInitialized( com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.class, com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.Builder.class); } private int bitField0_; public static final int INPUT_FIELD_NUMBER = 1; private volatile java.lang.Object input_; /** * <code>required string input = 1;</code> */ public boolean hasInput() { return ((bitField0_ & 0x00000001) != 0); } /** * <code>required string input = 1;</code> */ public java.lang.String getInput() { java.lang.Object ref = input_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { input_ = s; } return s; } } /** * <code>required string input = 1;</code> */ public com.google.protobuf.ByteString getInputBytes() { java.lang.Object ref = input_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); input_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DNN_FIELD_NUMBER = 2; private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN dnn_; /** * <code>required .protos.DNN dnn = 2;</code> */ public boolean hasDnn() { return ((bitField0_ & 0x00000002) != 0); } /** * <code>required .protos.DNN dnn = 2;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn() { return dnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } /** * <code>required .protos.DNN dnn = 2;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder() { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasInput()) { memoizedIsInitialized = 0; return false; } if (!hasDnn()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, input_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getDnn()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, input_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getDnn()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower)) { return super.equals(obj); } com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower other = (com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower) obj; if (hasInput() != other.hasInput()) return false; if (hasInput()) { if (!getInput() .equals(other.getInput())) return false; } if (hasDnn() != other.hasDnn()) return false; if (hasDnn()) { if (!getDnn() .equals(other.getDnn())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasInput()) { hash = (37 * hash) + INPUT_FIELD_NUMBER; hash = (53 * hash) + getInput().hashCode(); } if (hasDnn()) { hash = (37 * hash) + DNN_FIELD_NUMBER; hash = (53 * hash) + getDnn().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code protos.Tower} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:protos.Tower) com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TowerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_Tower_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_Tower_fieldAccessorTable .ensureFieldAccessorsInitialized( com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.class, com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.Builder.class); } // Construct using com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getDnnFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); input_ = ""; bitField0_ = (bitField0_ & ~0x00000001); if (dnnBuilder_ == null) { dnn_ = null; } else { dnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_Tower_descriptor; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower getDefaultInstanceForType() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.getDefaultInstance(); } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower build() { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower buildPartial() { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower result = new com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.input_ = input_; if (((from_bitField0_ & 0x00000002) != 0)) { if (dnnBuilder_ == null) { result.dnn_ = dnn_; } else { result.dnn_ = dnnBuilder_.build(); } to_bitField0_ |= 0x00000002; } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor 
field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower) { return mergeFrom((com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower other) { if (other == com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower.getDefaultInstance()) return this; if (other.hasInput()) { bitField0_ |= 0x00000001; input_ = other.input_; onChanged(); } if (other.hasDnn()) { mergeDnn(other.getDnn()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasInput()) { return false; } if (!hasDnn()) { return false; } return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object input_ = ""; /** * <code>required string input = 1;</code> */ public boolean hasInput() { return ((bitField0_ & 0x00000001) != 0); } /** * <code>required string input = 1;</code> */ public java.lang.String getInput() { java.lang.Object ref = input_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { input_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <code>required string input = 1;</code> */ public com.google.protobuf.ByteString getInputBytes() { java.lang.Object ref = input_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); input_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string input = 1;</code> */ public Builder setInput( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; input_ = value; onChanged(); return this; } /** * <code>required string input = 1;</code> */ public Builder clearInput() { bitField0_ = (bitField0_ & ~0x00000001); input_ = getDefaultInstance().getInput(); onChanged(); return this; } /** * <code>required string input = 1;</code> */ public Builder setInputBytes( com.google.protobuf.ByteString value) { if (value == null) { throw 
new NullPointerException(); } bitField0_ |= 0x00000001; input_ = value; onChanged(); return this; } private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN dnn_; private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> dnnBuilder_; /** * <code>required .protos.DNN dnn = 2;</code> */ public boolean hasDnn() { return ((bitField0_ & 0x00000002) != 0); } /** * <code>required .protos.DNN dnn = 2;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn() { if (dnnBuilder_ == null) { return dnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } else { return dnnBuilder_.getMessage(); } } /** * <code>required .protos.DNN dnn = 2;</code> */ public Builder setDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (dnnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } dnn_ = value; onChanged(); } else { dnnBuilder_.setMessage(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .protos.DNN dnn = 2;</code> */ public Builder setDnn( com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder builderForValue) { if (dnnBuilder_ == null) { dnn_ = builderForValue.build(); onChanged(); } else { dnnBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; return this; } /** * <code>required .protos.DNN dnn = 2;</code> */ public Builder mergeDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (dnnBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && dnn_ != null && dnn_ != com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance()) { dnn_ = com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.newBuilder(dnn_).mergeFrom(value).buildPartial(); } else { dnn_ = value; } onChanged(); } else { dnnBuilder_.mergeFrom(value); } bitField0_ |= 0x00000002; return this; } /** * <code>required .protos.DNN dnn = 2;</code> */ public Builder clearDnn() { if (dnnBuilder_ == null) { dnn_ = null; onChanged(); } else { dnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } /** * <code>required .protos.DNN dnn = 2;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder getDnnBuilder() { bitField0_ |= 0x00000002; onChanged(); return getDnnFieldBuilder().getBuilder(); } /** * <code>required .protos.DNN dnn = 2;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder() { if (dnnBuilder_ != null) { return dnnBuilder_.getMessageOrBuilder(); } else { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } } /** * <code>required .protos.DNN dnn = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> getDnnFieldBuilder() { if (dnnBuilder_ == null) { dnnBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder>( getDnn(), getParentForChildren(), isClean()); dnn_ = null; } return dnnBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:protos.Tower) } // @@protoc_insertion_point(class_scope:protos.Tower) private static final com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower(); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final com.google.protobuf.Parser<Tower> PARSER = new com.google.protobuf.AbstractParser<Tower>() { @java.lang.Override public Tower parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new Tower(input, extensionRegistry); } }; public static com.google.protobuf.Parser<Tower> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Tower> getParserForType() { return PARSER; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.Tower getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface TaskTowerOrBuilder extends // @@protoc_insertion_point(interface_extends:protos.TaskTower) com.google.protobuf.MessageOrBuilder { /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ boolean hasTowerName(); /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ java.lang.String getTowerName(); /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ com.google.protobuf.ByteString getTowerNameBytes(); /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ boolean hasLabelName(); /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ java.lang.String getLabelName(); /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ com.google.protobuf.ByteString getLabelNameBytes(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> getMetricsSetList(); /** 
* <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics getMetricsSet(int index); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ int getMetricsSetCount(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ java.util.List<? extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetOrBuilderList(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder getMetricsSetOrBuilder( int index); /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ boolean hasLossType(); /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType getLossType(); /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ boolean hasNumClass(); /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ int getNumClass(); /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ boolean hasDnn(); /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn(); /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder(); /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ boolean hasWeight(); /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ float getWeight(); /** * <pre> * label name for indicating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ boolean hasTaskSpaceIndicatorLabel(); /** * <pre> * label name for indicating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ java.lang.String getTaskSpaceIndicatorLabel(); /** * <pre> * label name for indicating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ com.google.protobuf.ByteString getTaskSpaceIndicatorLabelBytes(); /** * <pre> * the loss weight for samples in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ boolean hasInTaskSpaceWeight(); /** * <pre> * the loss weight for samples in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ float getInTaskSpaceWeight(); /** * <pre> * the loss weight for samples outside the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ boolean hasOutTaskSpaceWeight(); /** * <pre> * the loss weight for samples outside the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ float getOutTaskSpaceWeight(); } /** * Protobuf type {@code protos.TaskTower} */ public static final class TaskTower 
extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:protos.TaskTower) TaskTowerOrBuilder { private static final long serialVersionUID = 0L; // Use TaskTower.newBuilder() to construct. private TaskTower(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TaskTower() { towerName_ = ""; labelName_ = ""; metricsSet_ = java.util.Collections.emptyList(); lossType_ = 0; numClass_ = 1; weight_ = 1F; taskSpaceIndicatorLabel_ = ""; inTaskSpaceWeight_ = 1F; outTaskSpaceWeight_ = 1F; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new TaskTower(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private TaskTower( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; towerName_ = bs; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; labelName_ = bs; break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { metricsSet_ = new java.util.ArrayList<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics>(); mutable_bitField0_ |= 0x00000004; } metricsSet_.add( input.readMessage(com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.PARSER, extensionRegistry)); break; } case 32: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType value = com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000004; lossType_ = rawValue; } break; } case 40: { bitField0_ |= 0x00000008; numClass_ = input.readUInt32(); break; } case 50: { com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder subBuilder = null; if (((bitField0_ & 0x00000010) != 0)) { subBuilder = dnn_.toBuilder(); } dnn_ = input.readMessage(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(dnn_); dnn_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 61: { bitField0_ |= 0x00000020; weight_ = input.readFloat(); break; } case 82: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000040; taskSpaceIndicatorLabel_ = bs; break; } case 93: { bitField0_ |= 0x00000080; inTaskSpaceWeight_ = input.readFloat(); break; } case 101: { bitField0_ |= 0x00000100; outTaskSpaceWeight_ = input.readFloat(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) != 0)) { 
metricsSet_ = java.util.Collections.unmodifiableList(metricsSet_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_TaskTower_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_TaskTower_fieldAccessorTable .ensureFieldAccessorsInitialized( com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.class, com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.Builder.class); } private int bitField0_; public static final int TOWER_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object towerName_; /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public boolean hasTowerName() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public java.lang.String getTowerName() { java.lang.Object ref = towerName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { towerName_ = s; } return s; } } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public com.google.protobuf.ByteString getTowerNameBytes() { java.lang.Object ref = towerName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); towerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int LABEL_NAME_FIELD_NUMBER = 2; private volatile java.lang.Object labelName_; /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public boolean hasLabelName() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public java.lang.String getLabelName() { java.lang.Object ref = labelName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { labelName_ = s; } return s; } } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public com.google.protobuf.ByteString getLabelNameBytes() { java.lang.Object ref = labelName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); labelName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int METRICS_SET_FIELD_NUMBER = 3; private java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> metricsSet_; /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> getMetricsSetList() { return metricsSet_; } /** 
* <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<? extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetOrBuilderList() { return metricsSet_; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public int getMetricsSetCount() { return metricsSet_.size(); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics getMetricsSet(int index) { return metricsSet_.get(index); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder getMetricsSetOrBuilder( int index) { return metricsSet_.get(index); } public static final int LOSS_TYPE_FIELD_NUMBER = 4; private int lossType_; /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public boolean hasLossType() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType getLossType() { @SuppressWarnings("deprecation") com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType result = com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.valueOf(lossType_); return result == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.CLASSIFICATION : result; } public static final int NUM_CLASS_FIELD_NUMBER = 5; private int numClass_; /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public boolean hasNumClass() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public int getNumClass() { return numClass_; } public static final int DNN_FIELD_NUMBER = 6; private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN dnn_; /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public boolean hasDnn() { return ((bitField0_ & 0x00000010) != 0); } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn() { return dnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder() { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } public static final int WEIGHT_FIELD_NUMBER = 7; private float weight_; /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ public boolean hasWeight() { return ((bitField0_ & 0x00000020) != 0); } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ public float getWeight() { return weight_; } public static final int TASK_SPACE_INDICATOR_LABEL_FIELD_NUMBER = 10; private volatile java.lang.Object taskSpaceIndicatorLabel_; /** * <pre> * label name for indicating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public boolean hasTaskSpaceIndicatorLabel() { return ((bitField0_ & 0x00000040) != 0); } /** * <pre> * label name for indicating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public java.lang.String getTaskSpaceIndicatorLabel() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { taskSpaceIndicatorLabel_ = s; } return s; } } /** * <pre> * label name for indicating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public com.google.protobuf.ByteString getTaskSpaceIndicatorLabelBytes() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); taskSpaceIndicatorLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int IN_TASK_SPACE_WEIGHT_FIELD_NUMBER = 11; private float inTaskSpaceWeight_; /** * <pre> * the loss weight for samples in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public boolean hasInTaskSpaceWeight() { return ((bitField0_ & 0x00000080) != 0); } /** * <pre> * the loss weight for samples in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public float getInTaskSpaceWeight() { return inTaskSpaceWeight_; } public static final int OUT_TASK_SPACE_WEIGHT_FIELD_NUMBER = 12; private float outTaskSpaceWeight_; /** * <pre> * the loss weight for samples outside the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public boolean hasOutTaskSpaceWeight() { return ((bitField0_ & 0x00000100) != 0); } /** * <pre> * the loss weight for samples outside the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public float getOutTaskSpaceWeight() { return outTaskSpaceWeight_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTowerName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getMetricsSetCount(); i++) { if (!getMetricsSet(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { 
if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, towerName_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, labelName_); } for (int i = 0; i < metricsSet_.size(); i++) { output.writeMessage(3, metricsSet_.get(i)); } if (((bitField0_ & 0x00000004) != 0)) { output.writeEnum(4, lossType_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeUInt32(5, numClass_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(6, getDnn()); } if (((bitField0_ & 0x00000020) != 0)) { output.writeFloat(7, weight_); } if (((bitField0_ & 0x00000040) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, taskSpaceIndicatorLabel_); } if (((bitField0_ & 0x00000080) != 0)) { output.writeFloat(11, inTaskSpaceWeight_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeFloat(12, outTaskSpaceWeight_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, towerName_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, labelName_); } for (int i = 0; i < metricsSet_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, metricsSet_.get(i)); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, lossType_); } if (((bitField0_ & 0x00000008) != 0)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(5, numClass_); } if (((bitField0_ & 0x00000010) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, getDnn()); } if (((bitField0_ & 0x00000020) != 0)) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(7, weight_); } if (((bitField0_ & 0x00000040) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, taskSpaceIndicatorLabel_); } if (((bitField0_ & 0x00000080) != 0)) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(11, inTaskSpaceWeight_); } if (((bitField0_ & 0x00000100) != 0)) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(12, outTaskSpaceWeight_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower)) { return super.equals(obj); } com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower other = (com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower) obj; if (hasTowerName() != other.hasTowerName()) return false; if (hasTowerName()) { if (!getTowerName() .equals(other.getTowerName())) return false; } if (hasLabelName() != other.hasLabelName()) return false; if (hasLabelName()) { if (!getLabelName() .equals(other.getLabelName())) return false; } if (!getMetricsSetList() .equals(other.getMetricsSetList())) return false; if (hasLossType() != other.hasLossType()) return false; if (hasLossType()) { if (lossType_ != other.lossType_) return false; } if (hasNumClass() != other.hasNumClass()) return false; if (hasNumClass()) { if (getNumClass() != other.getNumClass()) return false; } if (hasDnn() != other.hasDnn()) return false; if (hasDnn()) { if (!getDnn() .equals(other.getDnn())) 
return false; } if (hasWeight() != other.hasWeight()) return false; if (hasWeight()) { if (java.lang.Float.floatToIntBits(getWeight()) != java.lang.Float.floatToIntBits( other.getWeight())) return false; } if (hasTaskSpaceIndicatorLabel() != other.hasTaskSpaceIndicatorLabel()) return false; if (hasTaskSpaceIndicatorLabel()) { if (!getTaskSpaceIndicatorLabel() .equals(other.getTaskSpaceIndicatorLabel())) return false; } if (hasInTaskSpaceWeight() != other.hasInTaskSpaceWeight()) return false; if (hasInTaskSpaceWeight()) { if (java.lang.Float.floatToIntBits(getInTaskSpaceWeight()) != java.lang.Float.floatToIntBits( other.getInTaskSpaceWeight())) return false; } if (hasOutTaskSpaceWeight() != other.hasOutTaskSpaceWeight()) return false; if (hasOutTaskSpaceWeight()) { if (java.lang.Float.floatToIntBits(getOutTaskSpaceWeight()) != java.lang.Float.floatToIntBits( other.getOutTaskSpaceWeight())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTowerName()) { hash = (37 * hash) + TOWER_NAME_FIELD_NUMBER; hash = (53 * hash) + getTowerName().hashCode(); } if (hasLabelName()) { hash = (37 * hash) + LABEL_NAME_FIELD_NUMBER; hash = (53 * hash) + getLabelName().hashCode(); } if (getMetricsSetCount() > 0) { hash = (37 * hash) + METRICS_SET_FIELD_NUMBER; hash = (53 * hash) + getMetricsSetList().hashCode(); } if (hasLossType()) { hash = (37 * hash) + LOSS_TYPE_FIELD_NUMBER; hash = (53 * hash) + lossType_; } if (hasNumClass()) { hash = (37 * hash) + NUM_CLASS_FIELD_NUMBER; hash = (53 * hash) + getNumClass(); } if (hasDnn()) { hash = (37 * hash) + DNN_FIELD_NUMBER; hash = (53 * hash) + getDnn().hashCode(); } if (hasWeight()) { hash = (37 * hash) + WEIGHT_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getWeight()); } if (hasTaskSpaceIndicatorLabel()) { hash = (37 * hash) + TASK_SPACE_INDICATOR_LABEL_FIELD_NUMBER; hash = (53 * hash) + getTaskSpaceIndicatorLabel().hashCode(); } if (hasInTaskSpaceWeight()) { hash = (37 * hash) + IN_TASK_SPACE_WEIGHT_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getInTaskSpaceWeight()); } if (hasOutTaskSpaceWeight()) { hash = (37 * hash) + OUT_TASK_SPACE_WEIGHT_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getOutTaskSpaceWeight()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code protos.TaskTower} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:protos.TaskTower) com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTowerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_TaskTower_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_TaskTower_fieldAccessorTable .ensureFieldAccessorsInitialized( com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.class, com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.Builder.class); } // Construct using com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMetricsSetFieldBuilder(); getDnnFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); towerName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); labelName_ = ""; bitField0_ = (bitField0_ & ~0x00000002); if (metricsSetBuilder_ == null) { metricsSet_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { metricsSetBuilder_.clear(); } lossType_ = 0; bitField0_ = (bitField0_ & ~0x00000008); numClass_ = 1; bitField0_ = (bitField0_ & ~0x00000010); if (dnnBuilder_ == null) { dnn_ = null; } else { dnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); weight_ = 1F; bitField0_ = (bitField0_ & ~0x00000040); taskSpaceIndicatorLabel_ = ""; bitField0_ = (bitField0_ & ~0x00000080); inTaskSpaceWeight_ = 1F; bitField0_ = (bitField0_ & ~0x00000100); outTaskSpaceWeight_ = 1F; bitField0_ = (bitField0_ & ~0x00000200); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_TaskTower_descriptor; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower getDefaultInstanceForType() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.getDefaultInstance(); } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower build() { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower buildPartial() { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower result = new com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower(this); int from_bitField0_ = bitField0_; 
int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.towerName_ = towerName_; if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.labelName_ = labelName_; if (metricsSetBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0)) { metricsSet_ = java.util.Collections.unmodifiableList(metricsSet_); bitField0_ = (bitField0_ & ~0x00000004); } result.metricsSet_ = metricsSet_; } else { result.metricsSet_ = metricsSetBuilder_.build(); } if (((from_bitField0_ & 0x00000008) != 0)) { to_bitField0_ |= 0x00000004; } result.lossType_ = lossType_; if (((from_bitField0_ & 0x00000010) != 0)) { to_bitField0_ |= 0x00000008; } result.numClass_ = numClass_; if (((from_bitField0_ & 0x00000020) != 0)) { if (dnnBuilder_ == null) { result.dnn_ = dnn_; } else { result.dnn_ = dnnBuilder_.build(); } to_bitField0_ |= 0x00000010; } if (((from_bitField0_ & 0x00000040) != 0)) { to_bitField0_ |= 0x00000020; } result.weight_ = weight_; if (((from_bitField0_ & 0x00000080) != 0)) { to_bitField0_ |= 0x00000040; } result.taskSpaceIndicatorLabel_ = taskSpaceIndicatorLabel_; if (((from_bitField0_ & 0x00000100) != 0)) { to_bitField0_ |= 0x00000080; } result.inTaskSpaceWeight_ = inTaskSpaceWeight_; if (((from_bitField0_ & 0x00000200) != 0)) { to_bitField0_ |= 0x00000100; } result.outTaskSpaceWeight_ = outTaskSpaceWeight_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower) { return mergeFrom((com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower other) { if (other == com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower.getDefaultInstance()) return this; if (other.hasTowerName()) { bitField0_ |= 0x00000001; towerName_ = other.towerName_; onChanged(); } if (other.hasLabelName()) { bitField0_ |= 0x00000002; labelName_ = other.labelName_; onChanged(); } if (metricsSetBuilder_ == null) { if (!other.metricsSet_.isEmpty()) { if (metricsSet_.isEmpty()) { metricsSet_ = other.metricsSet_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureMetricsSetIsMutable(); metricsSet_.addAll(other.metricsSet_); } onChanged(); } } else { if (!other.metricsSet_.isEmpty()) { if (metricsSetBuilder_.isEmpty()) { metricsSetBuilder_.dispose(); metricsSetBuilder_ = null; metricsSet_ = other.metricsSet_; bitField0_ = 
(bitField0_ & ~0x00000004); metricsSetBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getMetricsSetFieldBuilder() : null; } else { metricsSetBuilder_.addAllMessages(other.metricsSet_); } } } if (other.hasLossType()) { setLossType(other.getLossType()); } if (other.hasNumClass()) { setNumClass(other.getNumClass()); } if (other.hasDnn()) { mergeDnn(other.getDnn()); } if (other.hasWeight()) { setWeight(other.getWeight()); } if (other.hasTaskSpaceIndicatorLabel()) { bitField0_ |= 0x00000080; taskSpaceIndicatorLabel_ = other.taskSpaceIndicatorLabel_; onChanged(); } if (other.hasInTaskSpaceWeight()) { setInTaskSpaceWeight(other.getInTaskSpaceWeight()); } if (other.hasOutTaskSpaceWeight()) { setOutTaskSpaceWeight(other.getOutTaskSpaceWeight()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasTowerName()) { return false; } for (int i = 0; i < getMetricsSetCount(); i++) { if (!getMetricsSet(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object towerName_ = ""; /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public boolean hasTowerName() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public java.lang.String getTowerName() { java.lang.Object ref = towerName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { towerName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public com.google.protobuf.ByteString getTowerNameBytes() { java.lang.Object ref = towerName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); towerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public Builder setTowerName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; towerName_ = value; onChanged(); return this; } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public Builder clearTowerName() { bitField0_ = (bitField0_ & ~0x00000001); towerName_ = getDefaultInstance().getTowerName(); onChanged(); return this; } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public Builder setTowerNameBytes( com.google.protobuf.ByteString value) { if (value == null) 
{ throw new NullPointerException(); } bitField0_ |= 0x00000001; towerName_ = value; onChanged(); return this; } private java.lang.Object labelName_ = ""; /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public boolean hasLabelName() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public java.lang.String getLabelName() { java.lang.Object ref = labelName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { labelName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public com.google.protobuf.ByteString getLabelNameBytes() { java.lang.Object ref = labelName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); labelName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public Builder setLabelName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; labelName_ = value; onChanged(); return this; } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public Builder clearLabelName() { bitField0_ = (bitField0_ & ~0x00000002); labelName_ = getDefaultInstance().getLabelName(); onChanged(); return this; } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public Builder setLabelNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; labelName_ = value; onChanged(); return this; } private java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> metricsSet_ = java.util.Collections.emptyList(); private void ensureMetricsSetIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { metricsSet_ = new java.util.ArrayList<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics>(metricsSet_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> metricsSetBuilder_; /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> getMetricsSetList() { if (metricsSetBuilder_ == null) { return java.util.Collections.unmodifiableList(metricsSet_); } else { return metricsSetBuilder_.getMessageList(); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public int getMetricsSetCount() { if (metricsSetBuilder_ == null) { return metricsSet_.size(); } else { return metricsSetBuilder_.getCount(); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set 
= 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics getMetricsSet(int index) { if (metricsSetBuilder_ == null) { return metricsSet_.get(index); } else { return metricsSetBuilder_.getMessage(index); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder setMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics value) { if (metricsSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsSetIsMutable(); metricsSet_.set(index, value); onChanged(); } else { metricsSetBuilder_.setMessage(index, value); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder setMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder builderForValue) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.set(index, builderForValue.build()); onChanged(); } else { metricsSetBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet(com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics value) { if (metricsSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsSetIsMutable(); metricsSet_.add(value); onChanged(); } else { metricsSetBuilder_.addMessage(value); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics value) { if (metricsSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsSetIsMutable(); metricsSet_.add(index, value); onChanged(); } else { metricsSetBuilder_.addMessage(index, value); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet( com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder builderForValue) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.add(builderForValue.build()); onChanged(); } else { metricsSetBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder builderForValue) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.add(index, builderForValue.build()); onChanged(); } else { metricsSetBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addAllMetricsSet( java.lang.Iterable<? 
extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> values) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, metricsSet_); onChanged(); } else { metricsSetBuilder_.addAllMessages(values); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder clearMetricsSet() { if (metricsSetBuilder_ == null) { metricsSet_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { metricsSetBuilder_.clear(); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder removeMetricsSet(int index) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.remove(index); onChanged(); } else { metricsSetBuilder_.remove(index); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder getMetricsSetBuilder( int index) { return getMetricsSetFieldBuilder().getBuilder(index); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder getMetricsSetOrBuilder( int index) { if (metricsSetBuilder_ == null) { return metricsSet_.get(index); } else { return metricsSetBuilder_.getMessageOrBuilder(index); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<? extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetOrBuilderList() { if (metricsSetBuilder_ != null) { return metricsSetBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(metricsSet_); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder addMetricsSetBuilder() { return getMetricsSetFieldBuilder().addBuilder( com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.getDefaultInstance()); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder addMetricsSetBuilder( int index) { return getMetricsSetFieldBuilder().addBuilder( index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.getDefaultInstance()); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder> getMetricsSetBuilderList() { return getMetricsSetFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetFieldBuilder() { if (metricsSetBuilder_ == null) { metricsSetBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder, 
com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder>( metricsSet_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); metricsSet_ = null; } return metricsSetBuilder_; } private int lossType_ = 0; /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public boolean hasLossType() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType getLossType() { @SuppressWarnings("deprecation") com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType result = com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.valueOf(lossType_); return result == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.CLASSIFICATION : result; } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public Builder setLossType(com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; lossType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public Builder clearLossType() { bitField0_ = (bitField0_ & ~0x00000008); lossType_ = 0; onChanged(); return this; } private int numClass_ = 1; /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public boolean hasNumClass() { return ((bitField0_ & 0x00000010) != 0); } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public int getNumClass() { return numClass_; } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public Builder setNumClass(int value) { bitField0_ |= 0x00000010; numClass_ = value; onChanged(); return this; } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public Builder clearNumClass() { bitField0_ = (bitField0_ & ~0x00000010); numClass_ = 1; onChanged(); return this; } private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN dnn_; private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> dnnBuilder_; /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public boolean hasDnn() { return ((bitField0_ & 0x00000020) != 0); } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn() { if (dnnBuilder_ == null) { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } else { return dnnBuilder_.getMessage(); } } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder setDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (dnnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } dnn_ = value; onChanged(); } else { dnnBuilder_.setMessage(value); } bitField0_ |= 0x00000020; return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder setDnn( com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder builderForValue) { if (dnnBuilder_ == null) { dnn_ = builderForValue.build(); onChanged(); } else { dnnBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder mergeDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (dnnBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0) && dnn_ != null && dnn_ != com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance()) { dnn_ = com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.newBuilder(dnn_).mergeFrom(value).buildPartial(); } else { dnn_ = value; } onChanged(); } else { dnnBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder clearDnn() { if (dnnBuilder_ == null) { dnn_ = null; onChanged(); } else { dnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder getDnnBuilder() { bitField0_ |= 0x00000020; onChanged(); return getDnnFieldBuilder().getBuilder(); } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder() { if (dnnBuilder_ != null) { return dnnBuilder_.getMessageOrBuilder(); } else { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> getDnnFieldBuilder() { if (dnnBuilder_ == null) { dnnBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder>( getDnn(), getParentForChildren(), isClean()); dnn_ = null; } return dnnBuilder_; } private float weight_ = 1F; /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ public boolean hasWeight() { return ((bitField0_ & 0x00000040) != 0); } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ public float getWeight() { return weight_; } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ public Builder setWeight(float value) { bitField0_ |= 0x00000040; weight_ = value; onChanged(); return this; } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 7 [default = 1];</code> */ public Builder clearWeight() { bitField0_ = (bitField0_ & ~0x00000040); weight_ = 1F; onChanged(); return this; } private java.lang.Object taskSpaceIndicatorLabel_ = ""; /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public boolean hasTaskSpaceIndicatorLabel() { return ((bitField0_ & 0x00000080) != 0); } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public java.lang.String getTaskSpaceIndicatorLabel() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { taskSpaceIndicatorLabel_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public com.google.protobuf.ByteString getTaskSpaceIndicatorLabelBytes() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); taskSpaceIndicatorLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public Builder setTaskSpaceIndicatorLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000080; taskSpaceIndicatorLabel_ = value; onChanged(); return this; } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public Builder clearTaskSpaceIndicatorLabel() { bitField0_ = (bitField0_ & ~0x00000080); taskSpaceIndicatorLabel_ = 
getDefaultInstance().getTaskSpaceIndicatorLabel(); onChanged(); return this; } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public Builder setTaskSpaceIndicatorLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000080; taskSpaceIndicatorLabel_ = value; onChanged(); return this; } private float inTaskSpaceWeight_ = 1F; /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public boolean hasInTaskSpaceWeight() { return ((bitField0_ & 0x00000100) != 0); } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public float getInTaskSpaceWeight() { return inTaskSpaceWeight_; } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public Builder setInTaskSpaceWeight(float value) { bitField0_ |= 0x00000100; inTaskSpaceWeight_ = value; onChanged(); return this; } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public Builder clearInTaskSpaceWeight() { bitField0_ = (bitField0_ & ~0x00000100); inTaskSpaceWeight_ = 1F; onChanged(); return this; } private float outTaskSpaceWeight_ = 1F; /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public boolean hasOutTaskSpaceWeight() { return ((bitField0_ & 0x00000200) != 0); } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public float getOutTaskSpaceWeight() { return outTaskSpaceWeight_; } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public Builder setOutTaskSpaceWeight(float value) { bitField0_ |= 0x00000200; outTaskSpaceWeight_ = value; onChanged(); return this; } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public Builder clearOutTaskSpaceWeight() { bitField0_ = (bitField0_ & ~0x00000200); outTaskSpaceWeight_ = 1F; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:protos.TaskTower) } // @@protoc_insertion_point(class_scope:protos.TaskTower) private static final com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower(); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final com.google.protobuf.Parser<TaskTower> PARSER = new com.google.protobuf.AbstractParser<TaskTower>() { @java.lang.Override public TaskTower 
parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new TaskTower(input, extensionRegistry); } }; public static com.google.protobuf.Parser<TaskTower> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<TaskTower> getParserForType() { return PARSER; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.TaskTower getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } public interface BayesTaskTowerOrBuilder extends // @@protoc_insertion_point(interface_extends:protos.BayesTaskTower) com.google.protobuf.MessageOrBuilder { /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ boolean hasTowerName(); /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ java.lang.String getTowerName(); /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ com.google.protobuf.ByteString getTowerNameBytes(); /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ boolean hasLabelName(); /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ java.lang.String getLabelName(); /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ com.google.protobuf.ByteString getLabelNameBytes(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> getMetricsSetList(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics getMetricsSet(int index); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ int getMetricsSetCount(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ java.util.List<? 
extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetOrBuilderList(); /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder getMetricsSetOrBuilder( int index); /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ boolean hasLossType(); /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType getLossType(); /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ boolean hasNumClass(); /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ int getNumClass(); /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ boolean hasDnn(); /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn(); /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder(); /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ java.util.List<java.lang.String> getRelationTowerNamesList(); /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ int getRelationTowerNamesCount(); /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ java.lang.String getRelationTowerNames(int index); /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ com.google.protobuf.ByteString getRelationTowerNamesBytes(int index); /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ boolean hasRelationDnn(); /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getRelationDnn(); /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getRelationDnnOrBuilder(); /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ boolean hasWeight(); /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ float getWeight(); /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ boolean hasTaskSpaceIndicatorLabel(); /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ java.lang.String getTaskSpaceIndicatorLabel(); /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ com.google.protobuf.ByteString getTaskSpaceIndicatorLabelBytes(); /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ 
boolean hasInTaskSpaceWeight(); /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ float getInTaskSpaceWeight(); /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ boolean hasOutTaskSpaceWeight(); /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ float getOutTaskSpaceWeight(); } /** * Protobuf type {@code protos.BayesTaskTower} */ public static final class BayesTaskTower extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:protos.BayesTaskTower) BayesTaskTowerOrBuilder { private static final long serialVersionUID = 0L; // Use BayesTaskTower.newBuilder() to construct. private BayesTaskTower(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BayesTaskTower() { towerName_ = ""; labelName_ = ""; metricsSet_ = java.util.Collections.emptyList(); lossType_ = 0; numClass_ = 1; relationTowerNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; weight_ = 1F; taskSpaceIndicatorLabel_ = ""; inTaskSpaceWeight_ = 1F; outTaskSpaceWeight_ = 1F; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new BayesTaskTower(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BayesTaskTower( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; towerName_ = bs; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; labelName_ = bs; break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) != 0)) { metricsSet_ = new java.util.ArrayList<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics>(); mutable_bitField0_ |= 0x00000004; } metricsSet_.add( input.readMessage(com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.PARSER, extensionRegistry)); break; } case 32: { int rawValue = input.readEnum(); @SuppressWarnings("deprecation") com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType value = com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(4, rawValue); } else { bitField0_ |= 0x00000004; lossType_ = rawValue; } break; } case 40: { bitField0_ |= 0x00000008; numClass_ = input.readUInt32(); break; } case 50: { com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder subBuilder = null; if (((bitField0_ & 0x00000010) != 0)) { subBuilder = dnn_.toBuilder(); } dnn_ = input.readMessage(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(dnn_); dnn_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000010; break; } case 58: { 
com.google.protobuf.ByteString bs = input.readBytes(); if (!((mutable_bitField0_ & 0x00000040) != 0)) { relationTowerNames_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000040; } relationTowerNames_.add(bs); break; } case 66: { com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder subBuilder = null; if (((bitField0_ & 0x00000020) != 0)) { subBuilder = relationDnn_.toBuilder(); } relationDnn_ = input.readMessage(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(relationDnn_); relationDnn_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000020; break; } case 77: { bitField0_ |= 0x00000040; weight_ = input.readFloat(); break; } case 82: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000080; taskSpaceIndicatorLabel_ = bs; break; } case 93: { bitField0_ |= 0x00000100; inTaskSpaceWeight_ = input.readFloat(); break; } case 101: { bitField0_ |= 0x00000200; outTaskSpaceWeight_ = input.readFloat(); break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) != 0)) { metricsSet_ = java.util.Collections.unmodifiableList(metricsSet_); } if (((mutable_bitField0_ & 0x00000040) != 0)) { relationTowerNames_ = relationTowerNames_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_BayesTaskTower_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_BayesTaskTower_fieldAccessorTable .ensureFieldAccessorsInitialized( com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.class, com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.Builder.class); } private int bitField0_; public static final int TOWER_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object towerName_; /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public boolean hasTowerName() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public java.lang.String getTowerName() { java.lang.Object ref = towerName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { towerName_ = s; } return s; } } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public com.google.protobuf.ByteString getTowerNameBytes() { java.lang.Object ref = towerName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); towerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public 
static final int LABEL_NAME_FIELD_NUMBER = 2; private volatile java.lang.Object labelName_; /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public boolean hasLabelName() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public java.lang.String getLabelName() { java.lang.Object ref = labelName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { labelName_ = s; } return s; } } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public com.google.protobuf.ByteString getLabelNameBytes() { java.lang.Object ref = labelName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); labelName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int METRICS_SET_FIELD_NUMBER = 3; private java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> metricsSet_; /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> getMetricsSetList() { return metricsSet_; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<? extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetOrBuilderList() { return metricsSet_; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public int getMetricsSetCount() { return metricsSet_.size(); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics getMetricsSet(int index) { return metricsSet_.get(index); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder getMetricsSetOrBuilder( int index) { return metricsSet_.get(index); } public static final int LOSS_TYPE_FIELD_NUMBER = 4; private int lossType_; /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public boolean hasLossType() { return ((bitField0_ & 0x00000004) != 0); } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType getLossType() { @SuppressWarnings("deprecation") com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType result = com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.valueOf(lossType_); return result == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.CLASSIFICATION : result; } public static final int NUM_CLASS_FIELD_NUMBER = 5; private int numClass_; /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public boolean hasNumClass() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public int getNumClass() { return numClass_; } public static final int DNN_FIELD_NUMBER = 6; private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN dnn_; /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public boolean hasDnn() { return ((bitField0_ & 0x00000010) != 0); } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn() { return dnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder() { return dnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } public static final int RELATION_TOWER_NAMES_FIELD_NUMBER = 7; private com.google.protobuf.LazyStringList relationTowerNames_; /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public com.google.protobuf.ProtocolStringList getRelationTowerNamesList() { return relationTowerNames_; } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public int getRelationTowerNamesCount() { return relationTowerNames_.size(); } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public java.lang.String getRelationTowerNames(int index) { return relationTowerNames_.get(index); } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public com.google.protobuf.ByteString getRelationTowerNamesBytes(int index) { return relationTowerNames_.getByteString(index); } public static final int RELATION_DNN_FIELD_NUMBER = 8; private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN relationDnn_; /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public boolean hasRelationDnn() { return ((bitField0_ & 0x00000020) != 0); } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getRelationDnn() { return relationDnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : relationDnn_; } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getRelationDnnOrBuilder() { return relationDnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : relationDnn_; } public static final int WEIGHT_FIELD_NUMBER = 9; private float weight_; /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ public boolean hasWeight() { return ((bitField0_ & 0x00000040) != 0); } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ public float getWeight() { return weight_; } public static final int TASK_SPACE_INDICATOR_LABEL_FIELD_NUMBER = 10; private volatile java.lang.Object taskSpaceIndicatorLabel_; /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public boolean hasTaskSpaceIndicatorLabel() { return ((bitField0_ & 0x00000080) != 0); } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public java.lang.String getTaskSpaceIndicatorLabel() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { taskSpaceIndicatorLabel_ = s; } return s; } } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public com.google.protobuf.ByteString getTaskSpaceIndicatorLabelBytes() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); taskSpaceIndicatorLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int IN_TASK_SPACE_WEIGHT_FIELD_NUMBER = 11; private float inTaskSpaceWeight_; /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public boolean hasInTaskSpaceWeight() { return ((bitField0_ & 0x00000100) != 0); } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public float getInTaskSpaceWeight() { return inTaskSpaceWeight_; } public static final int OUT_TASK_SPACE_WEIGHT_FIELD_NUMBER = 12; private float outTaskSpaceWeight_; /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public boolean hasOutTaskSpaceWeight() { return ((bitField0_ & 0x00000200) != 0); } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public float getOutTaskSpaceWeight() { return outTaskSpaceWeight_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; if (!hasTowerName()) { memoizedIsInitialized = 0; return false; } for (int i = 0; i < getMetricsSetCount(); i++) { if (!getMetricsSet(i).isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, towerName_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, labelName_); } for (int i = 0; i < metricsSet_.size(); i++) { output.writeMessage(3, metricsSet_.get(i)); } if (((bitField0_ & 0x00000004) != 0)) { output.writeEnum(4, lossType_); } if (((bitField0_ & 0x00000008) != 0)) { output.writeUInt32(5, numClass_); } if (((bitField0_ & 0x00000010) != 0)) { output.writeMessage(6, getDnn()); } for (int i = 0; i < relationTowerNames_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 7, relationTowerNames_.getRaw(i)); } if (((bitField0_ & 0x00000020) != 0)) { output.writeMessage(8, getRelationDnn()); } if (((bitField0_ & 0x00000040) != 0)) { output.writeFloat(9, weight_); } if (((bitField0_ & 0x00000080) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, taskSpaceIndicatorLabel_); } if (((bitField0_ & 0x00000100) != 0)) { output.writeFloat(11, inTaskSpaceWeight_); } if (((bitField0_ & 0x00000200) != 0)) { output.writeFloat(12, outTaskSpaceWeight_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, towerName_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, labelName_); } for (int i = 0; i < metricsSet_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, metricsSet_.get(i)); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, lossType_); } if (((bitField0_ & 0x00000008) != 0)) { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(5, numClass_); } if (((bitField0_ & 0x00000010) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, getDnn()); } { int dataSize = 0; for (int i = 0; i < relationTowerNames_.size(); i++) { dataSize += computeStringSizeNoTag(relationTowerNames_.getRaw(i)); } size += dataSize; size += 1 * getRelationTowerNamesList().size(); } if (((bitField0_ & 0x00000020) != 0)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(8, getRelationDnn()); } if (((bitField0_ & 0x00000040) != 0)) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(9, weight_); } if (((bitField0_ & 0x00000080) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, taskSpaceIndicatorLabel_); } if (((bitField0_ & 0x00000100) != 0)) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(11, inTaskSpaceWeight_); } if (((bitField0_ & 0x00000200) != 0)) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(12, outTaskSpaceWeight_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower)) { return super.equals(obj); } com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower other = (com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower) obj; if (hasTowerName() != other.hasTowerName()) return false; if (hasTowerName()) { if (!getTowerName() 
.equals(other.getTowerName())) return false; } if (hasLabelName() != other.hasLabelName()) return false; if (hasLabelName()) { if (!getLabelName() .equals(other.getLabelName())) return false; } if (!getMetricsSetList() .equals(other.getMetricsSetList())) return false; if (hasLossType() != other.hasLossType()) return false; if (hasLossType()) { if (lossType_ != other.lossType_) return false; } if (hasNumClass() != other.hasNumClass()) return false; if (hasNumClass()) { if (getNumClass() != other.getNumClass()) return false; } if (hasDnn() != other.hasDnn()) return false; if (hasDnn()) { if (!getDnn() .equals(other.getDnn())) return false; } if (!getRelationTowerNamesList() .equals(other.getRelationTowerNamesList())) return false; if (hasRelationDnn() != other.hasRelationDnn()) return false; if (hasRelationDnn()) { if (!getRelationDnn() .equals(other.getRelationDnn())) return false; } if (hasWeight() != other.hasWeight()) return false; if (hasWeight()) { if (java.lang.Float.floatToIntBits(getWeight()) != java.lang.Float.floatToIntBits( other.getWeight())) return false; } if (hasTaskSpaceIndicatorLabel() != other.hasTaskSpaceIndicatorLabel()) return false; if (hasTaskSpaceIndicatorLabel()) { if (!getTaskSpaceIndicatorLabel() .equals(other.getTaskSpaceIndicatorLabel())) return false; } if (hasInTaskSpaceWeight() != other.hasInTaskSpaceWeight()) return false; if (hasInTaskSpaceWeight()) { if (java.lang.Float.floatToIntBits(getInTaskSpaceWeight()) != java.lang.Float.floatToIntBits( other.getInTaskSpaceWeight())) return false; } if (hasOutTaskSpaceWeight() != other.hasOutTaskSpaceWeight()) return false; if (hasOutTaskSpaceWeight()) { if (java.lang.Float.floatToIntBits(getOutTaskSpaceWeight()) != java.lang.Float.floatToIntBits( other.getOutTaskSpaceWeight())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTowerName()) { hash = (37 * hash) + TOWER_NAME_FIELD_NUMBER; hash = (53 * hash) + getTowerName().hashCode(); } if (hasLabelName()) { hash = (37 * hash) + LABEL_NAME_FIELD_NUMBER; hash = (53 * hash) + getLabelName().hashCode(); } if (getMetricsSetCount() > 0) { hash = (37 * hash) + METRICS_SET_FIELD_NUMBER; hash = (53 * hash) + getMetricsSetList().hashCode(); } if (hasLossType()) { hash = (37 * hash) + LOSS_TYPE_FIELD_NUMBER; hash = (53 * hash) + lossType_; } if (hasNumClass()) { hash = (37 * hash) + NUM_CLASS_FIELD_NUMBER; hash = (53 * hash) + getNumClass(); } if (hasDnn()) { hash = (37 * hash) + DNN_FIELD_NUMBER; hash = (53 * hash) + getDnn().hashCode(); } if (getRelationTowerNamesCount() > 0) { hash = (37 * hash) + RELATION_TOWER_NAMES_FIELD_NUMBER; hash = (53 * hash) + getRelationTowerNamesList().hashCode(); } if (hasRelationDnn()) { hash = (37 * hash) + RELATION_DNN_FIELD_NUMBER; hash = (53 * hash) + getRelationDnn().hashCode(); } if (hasWeight()) { hash = (37 * hash) + WEIGHT_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getWeight()); } if (hasTaskSpaceIndicatorLabel()) { hash = (37 * hash) + TASK_SPACE_INDICATOR_LABEL_FIELD_NUMBER; hash = (53 * hash) + getTaskSpaceIndicatorLabel().hashCode(); } if (hasInTaskSpaceWeight()) { hash = (37 * hash) + IN_TASK_SPACE_WEIGHT_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits( getInTaskSpaceWeight()); } if (hasOutTaskSpaceWeight()) { hash = (37 * hash) + OUT_TASK_SPACE_WEIGHT_FIELD_NUMBER; hash = (53 * 
hash) + java.lang.Float.floatToIntBits( getOutTaskSpaceWeight()); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() 
{ return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code protos.BayesTaskTower} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:protos.BayesTaskTower) com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTowerOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_BayesTaskTower_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_BayesTaskTower_fieldAccessorTable .ensureFieldAccessorsInitialized( com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.class, com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.Builder.class); } // Construct using com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getMetricsSetFieldBuilder(); getDnnFieldBuilder(); getRelationDnnFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); towerName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); labelName_ = ""; bitField0_ = (bitField0_ & ~0x00000002); if (metricsSetBuilder_ == null) { metricsSet_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { metricsSetBuilder_.clear(); } lossType_ = 0; bitField0_ = (bitField0_ & ~0x00000008); numClass_ = 1; bitField0_ = (bitField0_ & ~0x00000010); if (dnnBuilder_ == null) { dnn_ = null; } else { dnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); relationTowerNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000040); if (relationDnnBuilder_ == null) { relationDnn_ = null; } else { relationDnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); weight_ = 1F; bitField0_ = (bitField0_ & ~0x00000100); taskSpaceIndicatorLabel_ = ""; bitField0_ = (bitField0_ & ~0x00000200); inTaskSpaceWeight_ = 1F; bitField0_ = (bitField0_ & ~0x00000400); outTaskSpaceWeight_ = 1F; bitField0_ = (bitField0_ & ~0x00000800); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.internal_static_protos_BayesTaskTower_descriptor; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower getDefaultInstanceForType() { return 
com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.getDefaultInstance(); } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower build() { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower buildPartial() { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower result = new com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.towerName_ = towerName_; if (((from_bitField0_ & 0x00000002) != 0)) { to_bitField0_ |= 0x00000002; } result.labelName_ = labelName_; if (metricsSetBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0)) { metricsSet_ = java.util.Collections.unmodifiableList(metricsSet_); bitField0_ = (bitField0_ & ~0x00000004); } result.metricsSet_ = metricsSet_; } else { result.metricsSet_ = metricsSetBuilder_.build(); } if (((from_bitField0_ & 0x00000008) != 0)) { to_bitField0_ |= 0x00000004; } result.lossType_ = lossType_; if (((from_bitField0_ & 0x00000010) != 0)) { to_bitField0_ |= 0x00000008; } result.numClass_ = numClass_; if (((from_bitField0_ & 0x00000020) != 0)) { if (dnnBuilder_ == null) { result.dnn_ = dnn_; } else { result.dnn_ = dnnBuilder_.build(); } to_bitField0_ |= 0x00000010; } if (((bitField0_ & 0x00000040) != 0)) { relationTowerNames_ = relationTowerNames_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000040); } result.relationTowerNames_ = relationTowerNames_; if (((from_bitField0_ & 0x00000080) != 0)) { if (relationDnnBuilder_ == null) { result.relationDnn_ = relationDnn_; } else { result.relationDnn_ = relationDnnBuilder_.build(); } to_bitField0_ |= 0x00000020; } if (((from_bitField0_ & 0x00000100) != 0)) { to_bitField0_ |= 0x00000040; } result.weight_ = weight_; if (((from_bitField0_ & 0x00000200) != 0)) { to_bitField0_ |= 0x00000080; } result.taskSpaceIndicatorLabel_ = taskSpaceIndicatorLabel_; if (((from_bitField0_ & 0x00000400) != 0)) { to_bitField0_ |= 0x00000100; } result.inTaskSpaceWeight_ = inTaskSpaceWeight_; if (((from_bitField0_ & 0x00000800) != 0)) { to_bitField0_ |= 0x00000200; } result.outTaskSpaceWeight_ = outTaskSpaceWeight_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower) { return mergeFrom((com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower other) { if (other == com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower.getDefaultInstance()) return this; if (other.hasTowerName()) { bitField0_ |= 0x00000001; towerName_ = other.towerName_; onChanged(); } if (other.hasLabelName()) { bitField0_ |= 0x00000002; labelName_ = other.labelName_; onChanged(); } if (metricsSetBuilder_ == null) { if (!other.metricsSet_.isEmpty()) { if (metricsSet_.isEmpty()) { metricsSet_ = other.metricsSet_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureMetricsSetIsMutable(); metricsSet_.addAll(other.metricsSet_); } onChanged(); } } else { if (!other.metricsSet_.isEmpty()) { if (metricsSetBuilder_.isEmpty()) { metricsSetBuilder_.dispose(); metricsSetBuilder_ = null; metricsSet_ = other.metricsSet_; bitField0_ = (bitField0_ & ~0x00000004); metricsSetBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getMetricsSetFieldBuilder() : null; } else { metricsSetBuilder_.addAllMessages(other.metricsSet_); } } } if (other.hasLossType()) { setLossType(other.getLossType()); } if (other.hasNumClass()) { setNumClass(other.getNumClass()); } if (other.hasDnn()) { mergeDnn(other.getDnn()); } if (!other.relationTowerNames_.isEmpty()) { if (relationTowerNames_.isEmpty()) { relationTowerNames_ = other.relationTowerNames_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureRelationTowerNamesIsMutable(); relationTowerNames_.addAll(other.relationTowerNames_); } onChanged(); } if (other.hasRelationDnn()) { mergeRelationDnn(other.getRelationDnn()); } if (other.hasWeight()) { setWeight(other.getWeight()); } if (other.hasTaskSpaceIndicatorLabel()) { bitField0_ |= 0x00000200; taskSpaceIndicatorLabel_ = other.taskSpaceIndicatorLabel_; onChanged(); } if (other.hasInTaskSpaceWeight()) { setInTaskSpaceWeight(other.getInTaskSpaceWeight()); } if (other.hasOutTaskSpaceWeight()) { setOutTaskSpaceWeight(other.getOutTaskSpaceWeight()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { if (!hasTowerName()) { return false; } for (int i = 0; i < getMetricsSetCount(); i++) { if (!getMetricsSet(i).isInitialized()) { return false; } } return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object towerName_ = ""; /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public boolean hasTowerName() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * task name for 
the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public java.lang.String getTowerName() { java.lang.Object ref = towerName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { towerName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public com.google.protobuf.ByteString getTowerNameBytes() { java.lang.Object ref = towerName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); towerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public Builder setTowerName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; towerName_ = value; onChanged(); return this; } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public Builder clearTowerName() { bitField0_ = (bitField0_ & ~0x00000001); towerName_ = getDefaultInstance().getTowerName(); onChanged(); return this; } /** * <pre> * task name for the task tower * </pre> * * <code>required string tower_name = 1;</code> */ public Builder setTowerNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; towerName_ = value; onChanged(); return this; } private java.lang.Object labelName_ = ""; /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public boolean hasLabelName() { return ((bitField0_ & 0x00000002) != 0); } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public java.lang.String getLabelName() { java.lang.Object ref = labelName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { labelName_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public com.google.protobuf.ByteString getLabelNameBytes() { java.lang.Object ref = labelName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); labelName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public Builder setLabelName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; labelName_ = value; onChanged(); return this; } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public Builder clearLabelName() { bitField0_ = (bitField0_ & ~0x00000002); labelName_ = getDefaultInstance().getLabelName(); onChanged(); return this; } /** * <pre> * label for the task, default is label_fields by order * </pre> * * <code>optional string label_name = 2;</code> */ public 
Builder setLabelNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; labelName_ = value; onChanged(); return this; } private java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> metricsSet_ = java.util.Collections.emptyList(); private void ensureMetricsSetIsMutable() { if (!((bitField0_ & 0x00000004) != 0)) { metricsSet_ = new java.util.ArrayList<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics>(metricsSet_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> metricsSetBuilder_; /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> getMetricsSetList() { if (metricsSetBuilder_ == null) { return java.util.Collections.unmodifiableList(metricsSet_); } else { return metricsSetBuilder_.getMessageList(); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public int getMetricsSetCount() { if (metricsSetBuilder_ == null) { return metricsSet_.size(); } else { return metricsSetBuilder_.getCount(); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics getMetricsSet(int index) { if (metricsSetBuilder_ == null) { return metricsSet_.get(index); } else { return metricsSetBuilder_.getMessage(index); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder setMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics value) { if (metricsSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsSetIsMutable(); metricsSet_.set(index, value); onChanged(); } else { metricsSetBuilder_.setMessage(index, value); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder setMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder builderForValue) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.set(index, builderForValue.build()); onChanged(); } else { metricsSetBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet(com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics value) { if (metricsSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsSetIsMutable(); metricsSet_.add(value); onChanged(); } else { metricsSetBuilder_.addMessage(value); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics value) { if (metricsSetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMetricsSetIsMutable(); metricsSet_.add(index, value); onChanged(); } else { 
metricsSetBuilder_.addMessage(index, value); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet( com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder builderForValue) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.add(builderForValue.build()); onChanged(); } else { metricsSetBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addMetricsSet( int index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder builderForValue) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.add(index, builderForValue.build()); onChanged(); } else { metricsSetBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder addAllMetricsSet( java.lang.Iterable<? extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics> values) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, metricsSet_); onChanged(); } else { metricsSetBuilder_.addAllMessages(values); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder clearMetricsSet() { if (metricsSetBuilder_ == null) { metricsSet_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { metricsSetBuilder_.clear(); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public Builder removeMetricsSet(int index) { if (metricsSetBuilder_ == null) { ensureMetricsSetIsMutable(); metricsSet_.remove(index); onChanged(); } else { metricsSetBuilder_.remove(index); } return this; } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder getMetricsSetBuilder( int index) { return getMetricsSetFieldBuilder().getBuilder(index); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder getMetricsSetOrBuilder( int index) { if (metricsSetBuilder_ == null) { return metricsSet_.get(index); } else { return metricsSetBuilder_.getMessageOrBuilder(index); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<? 
extends com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetOrBuilderList() { if (metricsSetBuilder_ != null) { return metricsSetBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(metricsSet_); } } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder addMetricsSetBuilder() { return getMetricsSetFieldBuilder().addBuilder( com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.getDefaultInstance()); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder addMetricsSetBuilder( int index) { return getMetricsSetFieldBuilder().addBuilder( index, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.getDefaultInstance()); } /** * <pre> * metrics for the task * </pre> * * <code>repeated .protos.EvalMetrics metrics_set = 3;</code> */ public java.util.List<com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder> getMetricsSetBuilderList() { return getMetricsSetFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder> getMetricsSetFieldBuilder() { if (metricsSetBuilder_ == null) { metricsSetBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetrics.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.EvalMetricsOrBuilder>( metricsSet_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); metricsSet_ = null; } return metricsSetBuilder_; } private int lossType_ = 0; /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public boolean hasLossType() { return ((bitField0_ & 0x00000008) != 0); } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType getLossType() { @SuppressWarnings("deprecation") com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType result = com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.valueOf(lossType_); return result == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType.CLASSIFICATION : result; } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public Builder setLossType(com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.LossType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; lossType_ = value.getNumber(); onChanged(); return this; } /** * <pre> * loss for the task * </pre> * * <code>optional .protos.LossType loss_type = 4 [default = CLASSIFICATION];</code> */ public Builder clearLossType() { bitField0_ = (bitField0_ & ~0x00000008); lossType_ = 0; onChanged(); return this; } private int numClass_ = 1; /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public boolean hasNumClass() { return ((bitField0_ & 0x00000010) != 0); } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public int getNumClass() { return numClass_; } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public Builder setNumClass(int value) { bitField0_ |= 0x00000010; numClass_ = value; onChanged(); return this; } /** * <pre> * num_class for multi-class classification loss * </pre> * * <code>optional uint32 num_class = 5 [default = 1];</code> */ public Builder clearNumClass() { bitField0_ = (bitField0_ & ~0x00000010); numClass_ = 1; onChanged(); return this; } private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN dnn_; private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> dnnBuilder_; /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public boolean hasDnn() { return ((bitField0_ & 0x00000020) != 0); } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getDnn() { if (dnnBuilder_ == null) { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } else { return dnnBuilder_.getMessage(); } } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder setDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (dnnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } dnn_ = value; onChanged(); } else { dnnBuilder_.setMessage(value); } bitField0_ |= 0x00000020; return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder setDnn( com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder builderForValue) { if (dnnBuilder_ == null) { dnn_ = builderForValue.build(); onChanged(); } else { dnnBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder mergeDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (dnnBuilder_ == null) { if (((bitField0_ & 0x00000020) != 0) && dnn_ != null && dnn_ != com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance()) { dnn_ = com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.newBuilder(dnn_).mergeFrom(value).buildPartial(); } else { dnn_ = value; } onChanged(); } else { dnnBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public Builder clearDnn() { if (dnnBuilder_ == null) { dnn_ = null; onChanged(); } else { dnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); return this; } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder getDnnBuilder() { bitField0_ |= 0x00000020; onChanged(); return getDnnFieldBuilder().getBuilder(); } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getDnnOrBuilder() { if (dnnBuilder_ != null) { return dnnBuilder_.getMessageOrBuilder(); } else { return dnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : dnn_; } } /** * <pre> * task specific dnn * </pre> * * <code>optional .protos.DNN dnn = 6;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> getDnnFieldBuilder() { if (dnnBuilder_ == null) { dnnBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder>( getDnn(), getParentForChildren(), isClean()); dnn_ = null; } return dnnBuilder_; } private com.google.protobuf.LazyStringList relationTowerNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureRelationTowerNamesIsMutable() { if (!((bitField0_ & 0x00000040) != 0)) { relationTowerNames_ = new com.google.protobuf.LazyStringArrayList(relationTowerNames_); bitField0_ |= 0x00000040; } } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public com.google.protobuf.ProtocolStringList getRelationTowerNamesList() { return relationTowerNames_.getUnmodifiableView(); } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public int getRelationTowerNamesCount() { return relationTowerNames_.size(); } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public java.lang.String getRelationTowerNames(int index) { return relationTowerNames_.get(index); } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public com.google.protobuf.ByteString getRelationTowerNamesBytes(int index) { return relationTowerNames_.getByteString(index); } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public Builder setRelationTowerNames( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureRelationTowerNamesIsMutable(); relationTowerNames_.set(index, value); onChanged(); return this; } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public Builder addRelationTowerNames( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureRelationTowerNamesIsMutable(); relationTowerNames_.add(value); onChanged(); return this; } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public Builder addAllRelationTowerNames( java.lang.Iterable<java.lang.String> values) { ensureRelationTowerNamesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, relationTowerNames_); onChanged(); return this; } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public Builder clearRelationTowerNames() { relationTowerNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000040); onChanged(); return this; } /** * <pre> * related tower names * </pre> * * <code>repeated string relation_tower_names = 7;</code> */ public Builder addRelationTowerNamesBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensureRelationTowerNamesIsMutable(); 
relationTowerNames_.add(value); onChanged(); return this; } private com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN relationDnn_; private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> relationDnnBuilder_; /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public boolean hasRelationDnn() { return ((bitField0_ & 0x00000080) != 0); } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN getRelationDnn() { if (relationDnnBuilder_ == null) { return relationDnn_ == null ? com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : relationDnn_; } else { return relationDnnBuilder_.getMessage(); } } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public Builder setRelationDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (relationDnnBuilder_ == null) { if (value == null) { throw new NullPointerException(); } relationDnn_ = value; onChanged(); } else { relationDnnBuilder_.setMessage(value); } bitField0_ |= 0x00000080; return this; } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public Builder setRelationDnn( com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder builderForValue) { if (relationDnnBuilder_ == null) { relationDnn_ = builderForValue.build(); onChanged(); } else { relationDnnBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000080; return this; } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public Builder mergeRelationDnn(com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN value) { if (relationDnnBuilder_ == null) { if (((bitField0_ & 0x00000080) != 0) && relationDnn_ != null && relationDnn_ != com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance()) { relationDnn_ = com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.newBuilder(relationDnn_).mergeFrom(value).buildPartial(); } else { relationDnn_ = value; } onChanged(); } else { relationDnnBuilder_.mergeFrom(value); } bitField0_ |= 0x00000080; return this; } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public Builder clearRelationDnn() { if (relationDnnBuilder_ == null) { relationDnn_ = null; onChanged(); } else { relationDnnBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000080); return this; } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder getRelationDnnBuilder() { bitField0_ |= 0x00000080; onChanged(); return getRelationDnnFieldBuilder().getBuilder(); } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ public com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder getRelationDnnOrBuilder() { if (relationDnnBuilder_ != null) { return relationDnnBuilder_.getMessageOrBuilder(); } else { return relationDnn_ == null ? 
com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.getDefaultInstance() : relationDnn_; } } /** * <pre> * relation dnn * </pre> * * <code>optional .protos.DNN relation_dnn = 8;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder> getRelationDnnFieldBuilder() { if (relationDnnBuilder_ == null) { relationDnnBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNN.Builder, com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.DNNOrBuilder>( getRelationDnn(), getParentForChildren(), isClean()); relationDnn_ = null; } return relationDnnBuilder_; } private float weight_ = 1F; /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ public boolean hasWeight() { return ((bitField0_ & 0x00000100) != 0); } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ public float getWeight() { return weight_; } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ public Builder setWeight(float value) { bitField0_ |= 0x00000100; weight_ = value; onChanged(); return this; } /** * <pre> * training loss weights * </pre> * * <code>optional float weight = 9 [default = 1];</code> */ public Builder clearWeight() { bitField0_ = (bitField0_ & ~0x00000100); weight_ = 1F; onChanged(); return this; } private java.lang.Object taskSpaceIndicatorLabel_ = ""; /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public boolean hasTaskSpaceIndicatorLabel() { return ((bitField0_ & 0x00000200) != 0); } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public java.lang.String getTaskSpaceIndicatorLabel() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { taskSpaceIndicatorLabel_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public com.google.protobuf.ByteString getTaskSpaceIndicatorLabelBytes() { java.lang.Object ref = taskSpaceIndicatorLabel_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); taskSpaceIndicatorLabel_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public Builder setTaskSpaceIndicatorLabel( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000200; taskSpaceIndicatorLabel_ = value; onChanged(); return this; } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public Builder clearTaskSpaceIndicatorLabel() { bitField0_ 
= (bitField0_ & ~0x00000200); taskSpaceIndicatorLabel_ = getDefaultInstance().getTaskSpaceIndicatorLabel(); onChanged(); return this; } /** * <pre> * label name for indcating the sample space for the task tower * </pre> * * <code>optional string task_space_indicator_label = 10;</code> */ public Builder setTaskSpaceIndicatorLabelBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000200; taskSpaceIndicatorLabel_ = value; onChanged(); return this; } private float inTaskSpaceWeight_ = 1F; /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public boolean hasInTaskSpaceWeight() { return ((bitField0_ & 0x00000400) != 0); } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public float getInTaskSpaceWeight() { return inTaskSpaceWeight_; } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public Builder setInTaskSpaceWeight(float value) { bitField0_ |= 0x00000400; inTaskSpaceWeight_ = value; onChanged(); return this; } /** * <pre> * the loss weight for sample in the task space * </pre> * * <code>optional float in_task_space_weight = 11 [default = 1];</code> */ public Builder clearInTaskSpaceWeight() { bitField0_ = (bitField0_ & ~0x00000400); inTaskSpaceWeight_ = 1F; onChanged(); return this; } private float outTaskSpaceWeight_ = 1F; /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public boolean hasOutTaskSpaceWeight() { return ((bitField0_ & 0x00000800) != 0); } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public float getOutTaskSpaceWeight() { return outTaskSpaceWeight_; } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public Builder setOutTaskSpaceWeight(float value) { bitField0_ |= 0x00000800; outTaskSpaceWeight_ = value; onChanged(); return this; } /** * <pre> * the loss weight for sample out the task space * </pre> * * <code>optional float out_task_space_weight = 12 [default = 1];</code> */ public Builder clearOutTaskSpaceWeight() { bitField0_ = (bitField0_ & ~0x00000800); outTaskSpaceWeight_ = 1F; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:protos.BayesTaskTower) } // @@protoc_insertion_point(class_scope:protos.BayesTaskTower) private static final com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower(); } public static com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final com.google.protobuf.Parser<BayesTaskTower> PARSER = new 
com.google.protobuf.AbstractParser<BayesTaskTower>() { @java.lang.Override public BayesTaskTower parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BayesTaskTower(input, extensionRegistry); } }; public static com.google.protobuf.Parser<BayesTaskTower> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BayesTaskTower> getParserForType() { return PARSER; } @java.lang.Override public com.alibaba.alink.operator.batch.dl.ctr.protos.TowerOuterClass.BayesTaskTower getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } private static final com.google.protobuf.Descriptors.Descriptor internal_static_protos_Tower_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_protos_Tower_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_protos_TaskTower_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_protos_TaskTower_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_protos_BayesTaskTower_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_protos_BayesTaskTower_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\"easy_rec/python/protos/tower.proto\022\006pr" + "otos\032!easy_rec/python/protos/eval.proto\032" + "!easy_rec/python/protos/loss.proto\032 easy" + "_rec/python/protos/dnn.proto\"0\n\005Tower\022\r\n" + "\005input\030\001 \002(\t\022\030\n\003dnn\030\002 \002(\0132\013.protos.DNN\"\274" + "\002\n\tTaskTower\022\022\n\ntower_name\030\001 \002(\t\022\022\n\nlabe" + "l_name\030\002 \001(\t\022(\n\013metrics_set\030\003 \003(\0132\023.prot" + "os.EvalMetrics\0223\n\tloss_type\030\004 \001(\0162\020.prot" + "os.LossType:\016CLASSIFICATION\022\024\n\tnum_class" + "\030\005 \001(\r:\0011\022\030\n\003dnn\030\006 \001(\0132\013.protos.DNN\022\021\n\006w" + "eight\030\007 \001(\002:\0011\022\"\n\032task_space_indicator_l" + "abel\030\n \001(\t\022\037\n\024in_task_space_weight\030\013 \001(\002" + ":\0011\022 \n\025out_task_space_weight\030\014 \001(\002:\0011\"\202\003" + "\n\016BayesTaskTower\022\022\n\ntower_name\030\001 \002(\t\022\022\n\n" + "label_name\030\002 \001(\t\022(\n\013metrics_set\030\003 \003(\0132\023." + "protos.EvalMetrics\0223\n\tloss_type\030\004 \001(\0162\020." 
+ "protos.LossType:\016CLASSIFICATION\022\024\n\tnum_c" + "lass\030\005 \001(\r:\0011\022\030\n\003dnn\030\006 \001(\0132\013.protos.DNN\022" + "\034\n\024relation_tower_names\030\007 \003(\t\022!\n\014relatio" + "n_dnn\030\010 \001(\0132\013.protos.DNN\022\021\n\006weight\030\t \001(\002" + ":\0011\022\"\n\032task_space_indicator_label\030\n \001(\t\022" + "\037\n\024in_task_space_weight\030\013 \001(\002:\0011\022 \n\025out_" + "task_space_weight\030\014 \001(\002:\0011B0\n.com.alibab" + "a.alink.operator.batch.dl.ctr.protos" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.getDescriptor(), com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.getDescriptor(), com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.getDescriptor(), }); internal_static_protos_Tower_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_protos_Tower_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_protos_Tower_descriptor, new java.lang.String[] { "Input", "Dnn", }); internal_static_protos_TaskTower_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_protos_TaskTower_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_protos_TaskTower_descriptor, new java.lang.String[] { "TowerName", "LabelName", "MetricsSet", "LossType", "NumClass", "Dnn", "Weight", "TaskSpaceIndicatorLabel", "InTaskSpaceWeight", "OutTaskSpaceWeight", }); internal_static_protos_BayesTaskTower_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_protos_BayesTaskTower_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_protos_BayesTaskTower_descriptor, new java.lang.String[] { "TowerName", "LabelName", "MetricsSet", "LossType", "NumClass", "Dnn", "RelationTowerNames", "RelationDnn", "Weight", "TaskSpaceIndicatorLabel", "InTaskSpaceWeight", "OutTaskSpaceWeight", }); com.alibaba.alink.operator.batch.dl.ctr.protos.Eval.getDescriptor(); com.alibaba.alink.operator.batch.dl.ctr.protos.Loss.getDescriptor(); com.alibaba.alink.operator.batch.dl.ctr.protos.Dnn.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
97,684
14,668
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_SAFE_BROWSING_CONTENT_BROWSER_PASSWORD_PROTECTION_PASSWORD_PROTECTION_NAVIGATION_THROTTLE_H_
#define COMPONENTS_SAFE_BROWSING_CONTENT_BROWSER_PASSWORD_PROTECTION_PASSWORD_PROTECTION_NAVIGATION_THROTTLE_H_

#include "base/memory/ref_counted.h"
#include "content/public/browser/navigation_throttle.h"

namespace content {
class NavigationHandle;
}  // namespace content

namespace safe_browsing {

class PasswordProtectionRequestContent;

// PasswordProtectionNavigationThrottle defers or cancels navigation under the
// following conditions:
// (1) If a navigation starts while there is an on-going sync password reuse
//     ping, this throttle defers the navigation. When the verdict comes back,
//     if the verdict results in showing a modal warning dialog, the deferred
//     navigation will be canceled; otherwise, the deferred navigation will be
//     resumed.
// (2) If a navigation starts while there is a modal warning showing, this
//     throttle simply cancels the navigation.
class PasswordProtectionNavigationThrottle
    : public content::NavigationThrottle {
 public:
  PasswordProtectionNavigationThrottle(
      content::NavigationHandle* navigation_handle,
      scoped_refptr<PasswordProtectionRequestContent> request,
      bool is_warning_showing);

  PasswordProtectionNavigationThrottle(
      const PasswordProtectionNavigationThrottle&) = delete;
  PasswordProtectionNavigationThrottle& operator=(
      const PasswordProtectionNavigationThrottle&) = delete;

  ~PasswordProtectionNavigationThrottle() override;

  // content::NavigationThrottle:
  content::NavigationThrottle::ThrottleCheckResult WillStartRequest() override;
  content::NavigationThrottle::ThrottleCheckResult WillRedirectRequest()
      override;
  const char* GetNameForLogging() override;

  // Called to resume a deferred navigation once the
  // PasswordProtectionRequestContent has received a verdict and there is no
  // modal warning shown.
  void ResumeNavigation();

  // Called when the PasswordProtectionRequestContent has received a verdict
  // and there is a modal warning shown.
  void CancelNavigation(
      content::NavigationThrottle::ThrottleCheckResult result);

 private:
  scoped_refptr<PasswordProtectionRequestContent> request_;
  bool is_warning_showing_;
};

}  // namespace safe_browsing

#endif  // COMPONENTS_SAFE_BROWSING_CONTENT_BROWSER_PASSWORD_PROTECTION_PASSWORD_PROTECTION_NAVIGATION_THROTTLE_H_
785
7,158
// This file is part of OpenCV project. // It is subject to the license terms in the LICENSE file found in the top-level directory // of this distribution and at http://opencv.org/license.html. // This functions have been contributed by <NAME> <<EMAIL>> #include "../precomp.hpp" #include "geo_interpolation.hpp" #include <string> #include <map> namespace cv { namespace optflow { struct Graph_helper { std::vector<int> mem; int e_size; Graph_helper(int k, int num_nodes) { e_size = (2 * k + 1); mem.resize(e_size * num_nodes, 0); } inline int size(int id) { int r_addr = id * (e_size); return mem[r_addr]; } inline int * data(int id) { int r_addr = id * (e_size)+1; return &mem[r_addr]; } inline void add(int id, std::pair<float, int> data) { int r_addr = id * (e_size); int size = ++mem[r_addr]; r_addr += 2 * size - 1;//== 1 + 2*(size-1); *(float*)&mem[r_addr] = data.first; mem[r_addr + 1] = data.second; } inline bool color_in_target(int id, int color) { int r_addr = id * (e_size); int size = mem[r_addr]; r_addr += 2; for (int i = 0; i < size; i++) { if (mem[r_addr] == color) { return true; } r_addr += 2; } return false; } }; Mat sgeo_dist(const Mat& gra, int y, int x, float max, Mat &prev) { std::vector <Point2f> points; points.push_back(Point2f(static_cast<float>(x), static_cast<float>(y))); return sgeo_dist(gra, points, max, prev); } Mat sgeo_dist(const Mat& gra, const std::vector<Point2f> & points, float max, Mat &prev) { int Dx[] = { -1,0,1,-1,1,-1,0,1 }; int Dy[] = { -1,-1,-1,0,0,1,1,1 }; Mat dm(gra.rows, gra.cols, CV_32F, Scalar(max)); prev = Mat(gra.rows, gra.cols, CV_8U, Scalar(255)); std::multimap<float, Vec2i > not_visited_with_value; for (auto i = points.begin(); i != points.end(); i++) { int y = static_cast<int>(i->y); int x = static_cast<int>(i->x); not_visited_with_value.insert(std::pair<float, Vec2i >(0.f, Vec2i(y, x))); dm.at<float>(y, x) = 0; } bool done = false; while (!done) { if (not_visited_with_value.begin() == not_visited_with_value.end()) { done = true; break; } std::multimap<float, Vec2i >::iterator current_it = not_visited_with_value.begin(); std::pair<float, Vec2i > current_p = *current_it; not_visited_with_value.erase(current_it); int y = current_p.second[0]; int x = current_p.second[1]; float cur_d = current_p.first; if (dm.at<float>(y, x) != cur_d) { continue; } Vec8f gra_e = gra.at<Vec8f>(y, x); for (int i = 0; i < 8; i++) { if (gra_e[i] < 0) { continue; } int dx = Dx[i]; int dy = Dy[i]; if (dm.at<float>(y + dy, x + dx) > cur_d + gra_e[i]) { dm.at<float>(y + dy, x + dx) = cur_d + gra_e[i]; prev.at<uchar>(y + dy, x + dx) = static_cast<uchar>(7 - i); not_visited_with_value.insert(std::pair<float, Vec2i >(cur_d + gra_e[i], Vec2i(y + dy, x + dx))); } } } return dm; } Mat interpolate_irregular_nn_raster(const std::vector<Point2f> & prevPoints, const std::vector<Point2f> & nextPoints, const std::vector<uchar> & status, const Mat & i1) { Mat gra = getGraph(i1, 0.1f); int Dx[] = { -1,0,1,-1,1,-1,0,1 }; int Dy[] = { -1,-1,-1,0,0,1,1,1 }; int max_rounds = 10; Mat dirt = Mat(gra.rows, gra.cols, CV_8U, Scalar(0)); Mat quellknoten = Mat(gra.rows, gra.cols, CV_32S, Scalar(-1)); Mat dist = Mat(gra.rows, gra.cols, CV_32F, Scalar(std::numeric_limits<float>::max())); /* * assign quellknoten ids. 
*/ for (int i = 0; i < static_cast<int>(prevPoints.size()); i++) { int x = (int)prevPoints[i].x; int y = (int)prevPoints[i].y; if (status[i] == 0) continue; dirt.at<uchar>(y, x) = 1; dist.at<float>(y, x) = 0; quellknoten.at<int>(y, x) = i; } bool clean = true; bool done = false; int x = 0; int y = 0; int rounds = 0; while (!done) { /* * Update x and y * on even rounds go rasterscanorder , on odd round inverse rasterscanorder */ if (rounds % 2 == 0) { x++; if (x >= gra.cols) { x = 0; y++; if (y >= gra.rows) { y = 0; rounds++; y = gra.rows - 1; x = gra.cols - 1; if (rounds >= max_rounds || clean) { done = true; break; } } } } else { x--; if (x < 0) { x = gra.cols - 1; y--; if (y < 0) { y = gra.rows - 1; rounds++; y = 0; x = 0; if (rounds >= max_rounds || clean) { done = true; break; } } } } if (dirt.at<uchar>(y, x) == 0) { continue; } dirt.at<uchar>(y, x) = 0; float c_dist = dist.at<float>(y, x); Vec8f gra_e = gra.at<Vec8f>(y, x); for (int i = 0; i < 8; i++) { int tx = Dx[i]; int ty = Dy[i]; if (ty == 0 && tx == 0) { continue; } if (x + tx < 0 || x + tx >= gra.cols) { continue; } if (y + ty < 0 || y + ty >= gra.rows) { continue; } if (c_dist > dist.at<float>(y + ty, x + tx)) { if (c_dist > dist.at<float>(y + ty, x + tx) + gra_e[i]) { quellknoten.at<int>(y, x) = quellknoten.at<int>(y + ty, x + tx); dist.at<float>(y, x) = dist.at<float>(y + ty, x + tx) + gra_e[i]; dirt.at<uchar>(y, x) = 1; clean = false; } } else { if (c_dist + gra_e[i] < dist.at<float>(y + ty, x + tx)) { quellknoten.at<int>(y + ty, x + tx) = quellknoten.at<int>(y, x); dist.at<float>(y + ty, x + tx) = dist.at<float>(y, x) + gra_e[i]; dirt.at<uchar>(y + ty, x + tx) = 1; clean = false; } } } } Mat nnFlow(i1.rows, i1.cols, CV_32FC2, Scalar(0)); for (y = 0; y < i1.rows; y++) { for (x = 0; x < i1.cols; x++) { int id = quellknoten.at<int>(y, x); if (id != -1) { nnFlow.at<Point2f>(y, x) = nextPoints[id] - prevPoints[id]; } } } return nnFlow; } Mat interpolate_irregular_knn( const std::vector<Point2f> & _prevPoints, const std::vector<Point2f> & _nextPoints, const std::vector<uchar> & status, const Mat &color_img, int k, float pixeldistance) { Mat in(color_img.rows, color_img.cols, CV_32FC2); Mat mask = Mat::zeros(color_img.rows, color_img.cols, CV_8UC1); for (unsigned n = 0; n < _prevPoints.size(); n++) { if (_prevPoints[n].x >= 0 && _prevPoints[n].y >= 0 && _prevPoints[n].x < color_img.cols && _prevPoints[n].y < color_img.rows) { in.at<Point2f>(_prevPoints[n]) = _nextPoints[n] - _prevPoints[n]; mask.at<uchar>(_prevPoints[n]) = status[n]; } } int Dx[] = { -1,0,1,-1,1,-1,0,1 }; int Dy[] = { -1,-1,-1,0,0,1,1,1 }; Mat gra = getGraph(color_img, pixeldistance); Mat nnFlow(in.rows, in.cols, CV_32FC2, Scalar(0)); std::multimap<float, Vec2i > my_agents; // <arrivaltim , < target, color >> Graph_helper graph_helper(k, in.rows*in.cols); //< arrivaltime, color> int color = 0; std::vector<Vec2i> flow_point_list; for (int y = 0; y < in.rows; y++) { for (int x = 0; x < in.cols; x++) { if (mask.at<uchar>(y, x) > 0) { flow_point_list.push_back(Vec2i(y, x)); nnFlow.at<Vec2f>(y, x) = in.at<Vec2f>(y, x); int v_id = (y * in.cols + x); graph_helper.add(v_id, std::pair<float, int>(0.f, color)); Vec8f gra_e = gra.at<Vec8f>(y, x); for (int i = 0; i < 8; i++) { if (gra_e[i] < 0) continue; int dx = Dx[i]; int dy = Dy[i]; int target = (y + dy) * in.cols + (x + dx); Vec2i agent(target, color); my_agents.insert(std::pair<float, Vec2i >(gra_e[i], agent)); } color++; } } } int global_time = 0; bool done = false; while (!done) { if (my_agents.size() == 0) { done = 
true; break; } global_time++; std::multimap<float, Vec2i >::iterator current_it = my_agents.begin(); std::pair<float, Vec2i > current_p = *current_it; my_agents.erase(current_it); int target = current_p.second[0]; color = current_p.second[1]; float arriv_time = current_p.first; Vec8f gra_e = gra.at<Vec8f>(target);// (y*cols+x) if (graph_helper.size(target) >= k) { continue; } bool color_found_in_target = graph_helper.color_in_target(target, color); if (color_found_in_target) { continue; } graph_helper.add(target, std::pair<float, int>(arriv_time, color)); for (int i = 0; i < 8; i++) { if (gra_e[i] < 0) continue; int dx = Dx[i]; int dy = Dy[i]; int new_target = target + dx + (dy*in.cols); if (graph_helper.size(new_target) >= k) { continue; } color_found_in_target = graph_helper.color_in_target(new_target, color); if (color_found_in_target) { continue; } Vec2i new_agent(new_target, color); my_agents.insert(std::pair<float, Vec2i >(arriv_time + gra_e[i], new_agent)); } } Mat ret(in.rows, in.cols*k, CV_32FC2); for (int y = 0; y < in.rows; y++) { for (int x = 0; x < in.cols; x++) { for (int i = 0; i < k; i++) { float dist = *((float*)(graph_helper.data(y*in.cols + x) + 2 * i)); float id = *((float*)(graph_helper.data(y*in.cols + x) + 2 * i + 1)); ret.at<Vec2f>(y, k*x + i) = Vec2f(dist, id); } } } return ret; } Mat getGraph(const Mat &image, float edge_length) { int Dx[] = { -1,0,1,-1,1,-1,0,1 }; int Dy[] = { -1,-1,-1,0,0,1,1,1 }; Mat gra(image.rows, image.cols, CV_32FC(8)); for (int y = 0; y < gra.rows; y++) { for (int x = 0; x < gra.cols; x++) { for (int i = 0; i < 8; i++) { int dx = Dx[i]; int dy = Dy[i]; gra.at<Vec8f>(y, x)[i] = -1; if (x + dx < 0 || y + dy < 0 || x + dx >= gra.cols || y + dy >= gra.rows) { continue; } if (i < 4) { int si = 7 - i; gra.at<Vec8f>(y, x)[i] = gra.at<Vec8f>(y + dy, x + dx)[si]; } else { float p1 = dx * dx*edge_length*edge_length + dy * dy*edge_length*edge_length; float p2 = static_cast<float>(image.at<Vec3b>(y, x)[0] - image.at<Vec3b>(y + dy, x + dx)[0]); float p3 = static_cast<float>(image.at<Vec3b>(y, x)[1] - image.at<Vec3b>(y + dy, x + dx)[1]); float p4 = static_cast<float>(image.at<Vec3b>(y, x)[2] - image.at<Vec3b>(y + dy, x + dx)[2]); gra.at<Vec8f>(y, x)[i] = sqrt(p1 + p2 * p2 + p3 * p3 + p4 * p4); } } } } return gra; } Mat interpolate_irregular_nn( const std::vector<Point2f> & _prevPoints, const std::vector<Point2f> & _nextPoints, const std::vector<uchar> & status, const Mat &color_img, float pixeldistance) { int Dx[] = { -1,0,1,-1,1,-1,0,1 }; int Dy[] = { -1,-1,-1,0,0,1,1,1 }; std::vector<Point2f> prevPoints, nextPoints; std::map<std::pair<float, float>, std::pair<float, float>> flowMap; for (unsigned n = 0; n < _prevPoints.size(); n++) { if (status[n] != 0) { flowMap.insert(std::make_pair( std::make_pair(_prevPoints[n].x, _prevPoints[n].y), std::make_pair(_nextPoints[n].x, _nextPoints[n].y))); prevPoints.push_back(_prevPoints[n]); nextPoints.push_back(_nextPoints[n]); } } Mat gra = getGraph(color_img, pixeldistance); Mat prev; Mat geo_dist = sgeo_dist(gra, prevPoints, std::numeric_limits<float>::max(), prev); Mat nnFlow = Mat::zeros(color_img.size(), CV_32FC2); for (int y = 0; y < nnFlow.rows; y++) { for (int x = 0; x < nnFlow.cols; x++) { int cx = x; int cy = y; while (prev.at<uchar>(cy, cx) != 255) { int i = prev.at<uchar>(cy, cx); cx += Dx[i]; cy += Dy[i]; } auto val = flowMap[std::make_pair(static_cast<float>(cx), static_cast<float>(cy))]; nnFlow.at<Vec2f>(y, x) = Vec2f(val.first - cx, val.second - cy); } } return nnFlow; } }} // namespace
7,941
2,939
#ifndef LUMEN_EIR_CONVERSION_CONSTANT_OP_CONVERSION
#define LUMEN_EIR_CONVERSION_CONSTANT_OP_CONVERSION

#include "lumen/EIR/Conversion/ConversionSupport.h"

namespace lumen {
namespace eir {
class NullOpConversion;
class ConstantAtomOpConversion;
class ConstantBoolOpConversion;
class ConstantBigIntOpConversion;
class ConstantBinaryOpConversion;
class ConstantFloatOpConversion;
class ConstantFloatOpToStdConversion;
class ConstantIntOpConversion;
class ConstantListOpConversion;
class ConstantMapOpConversion;
class ConstantNilOpConversion;
class ConstantNoneOpConversion;
class ConstantTupleOpConversion;

void populateConstantOpConversionPatterns(OwningRewritePatternList &patterns,
                                          MLIRContext *context,
                                          EirTypeConverter &converter,
                                          TargetInfo &targetInfo);

}  // namespace eir
}  // namespace lumen

#endif  // LUMEN_EIR_CONVERSION_CONSTANT_OP_CONVERSION
398
348
{"nom":"Huriel","circ":"2ème circonscription","dpt":"Allier","inscrits":2067,"abs":1043,"votants":1024,"blancs":20,"nuls":13,"exp":991,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":264},{"nuance":"FI","nom":"M. <NAME>","voix":196},{"nuance":"LR","nom":"<NAME>","voix":182},{"nuance":"SOC","nom":"<NAME>","voix":140},{"nuance":"FN","nom":"Mme <NAME>","voix":101},{"nuance":"ECO","nom":"<NAME>","voix":27},{"nuance":"ECO","nom":"M. <NAME>","voix":19},{"nuance":"DIV","nom":"M. <NAME>","voix":13},{"nuance":"EXG","nom":"<NAME>","voix":12},{"nuance":"DLF","nom":"<NAME>","voix":10},{"nuance":"DVD","nom":"M. <NAME>","voix":8},{"nuance":"DVD","nom":"M. <NAME>","voix":6},{"nuance":"DIV","nom":"Mme <NAME>","voix":6},{"nuance":"EXD","nom":"<NAME>","voix":6},{"nuance":"DIV","nom":"M. <NAME>","voix":1}]}
322
4,403
<gh_stars>1000+
package cn.hutool.core.lang.mutable;

import cn.hutool.core.lang.Pair;

/**
 * Mutable {@link Pair} implementation whose key and value can be modified.
 *
 * @param <K> key type
 * @param <V> value type
 * @since 5.7.16
 */
public class MutablePair<K, V> extends Pair<K, V> implements Mutable<Pair<K, V>> {
	private static final long serialVersionUID = 1L;

	/**
	 * Constructor.
	 *
	 * @param key   the key
	 * @param value the value
	 */
	public MutablePair(K key, V value) {
		super(key, value);
	}

	/**
	 * Sets the key.
	 *
	 * @param key the new key
	 * @return this
	 */
	public MutablePair<K, V> setKey(K key) {
		this.key = key;
		return this;
	}

	/**
	 * Sets the value.
	 *
	 * @param value the new value
	 * @return this
	 */
	public MutablePair<K, V> setValue(V value) {
		this.value = value;
		return this;
	}

	@Override
	public Pair<K, V> get() {
		return this;
	}

	@Override
	public void set(Pair<K, V> pair) {
		this.key = pair.getKey();
		this.value = pair.getValue();
	}
}
461
790
package com.google.appengine.api.xmpp;

import com.google.appengine.api.utils.HttpRequestParser;
import java.io.IOException;
import javax.mail.BodyPart;
import javax.mail.MessagingException;
import javax.mail.internet.MimeMultipart;
import javax.servlet.http.HttpServletRequest;

class InboundMessageParser extends HttpRequestParser {

    static Message parseMessage(HttpServletRequest request) throws IOException {
        MessageBuilder builder = new MessageBuilder();
        builder.withMessageType(MessageType.CHAT);
        if (request.getParameter("from") != null) {
            builder.withFromJid(new JID(request.getParameter("from")));
        }
        if (request.getParameter("to") != null) {
            builder.withRecipientJids(new JID[] { new JID(request.getParameter("to")) });
        }
        if (request.getParameter("body") != null) {
            builder.withBody(request.getParameter("body"));
        }
        if (request.getParameter("stanza") != null) {
            builder.withStanza(request.getParameter("stanza"));
        }
        return builder.build();
    }
}
396
384
<reponame>hejamu/gromacs
/*
 * This file is part of the GROMACS molecular simulation package.
 *
 * Copyright (c) 2021, by the GROMACS development team, led by
 * <NAME>, <NAME>, <NAME>, and <NAME>,
 * and including many others, as listed in the AUTHORS file in the
 * top-level source directory and at http://www.gromacs.org.
 *
 * GROMACS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 *
 * GROMACS is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with GROMACS; if not, see
 * http://www.gnu.org/licenses, or write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * If you want to redistribute modifications to GROMACS, please
 * consider that scientific software is very special. Version
 * control is crucial - bugs must be traceable. We will be happy to
 * consider code for inclusion in the official distribution, but
 * derived work must not be called official GROMACS. Details are found
 * in the README & COPYING files - if they are missing, get the
 * official version at http://www.gromacs.org.
 *
 * To help us fund GROMACS development, we humbly ask that you cite
 * the research papers on the package. Check out http://www.gromacs.org.
 */
/*! \internal \file
 * \brief Defines a helper struct managing reference temperature changes
 *
 * \author <NAME> <<EMAIL>>
 * \ingroup module_modularsimulator
 */
#include "gmxpre.h"

#include "referencetemperaturemanager.h"

#include "gromacs/mdtypes/inputrec.h"
#include "gromacs/utility/gmxassert.h"

namespace gmx
{

ReferenceTemperatureManager::ReferenceTemperatureManager(t_inputrec* inputrec) :
    inputrec_(inputrec)
{
}

void ReferenceTemperatureManager::registerUpdateCallback(ReferenceTemperatureCallback referenceTemperatureCallback)
{
    callbacks_.emplace_back(std::move(referenceTemperatureCallback));
}

void ReferenceTemperatureManager::setReferenceTemperature(ArrayRef<const real>              newReferenceTemperatures,
                                                          ReferenceTemperatureChangeAlgorithm algorithm)
{
    GMX_RELEASE_ASSERT(newReferenceTemperatures.ssize() == inputrec_->opts.ngtc,
                       "Expected one new reference temperature per temperature group.");
    std::copy(newReferenceTemperatures.begin(), newReferenceTemperatures.end(), inputrec_->opts.ref_t);

    for (const auto& callback : callbacks_)
    {
        callback(constArrayRefFromArray(inputrec_->opts.ref_t, inputrec_->opts.ngtc), algorithm);
    }
}

} // namespace gmx
915
348
<filename>docs/data/leg-t2/067/06705277.json {"nom":"Mackenheim","circ":"5ème circonscription","dpt":"Bas-Rhin","inscrits":579,"abs":315,"votants":264,"blancs":13,"nuls":4,"exp":247,"res":[{"nuance":"REG","nom":"<NAME>","voix":163},{"nuance":"LR","nom":"<NAME>","voix":84}]}
112
2,174
<gh_stars>1000+
package ceui.lisa.adapters;

import com.facebook.rebound.SimpleSpringListener;
import com.facebook.rebound.Spring;
import com.facebook.rebound.SpringConfig;
import com.facebook.rebound.SpringSystem;

import ceui.lisa.databinding.RecyDownloadedBinding;

class DownloadedHolder extends ViewHolder<RecyDownloadedBinding> {

    Spring spring;

    DownloadedHolder(RecyDownloadedBinding bindView) {
        super(bindView);
        SpringSystem springSystem = SpringSystem.create();
        spring = springSystem.createSpring();
        spring.setSpringConfig(SpringConfig.fromOrigamiTensionAndFriction(40, 5));
        spring.addListener(new SimpleSpringListener() {
            @Override
            public void onSpringUpdate(Spring spring) {
                itemView.setTranslationX((float) spring.getCurrentValue());
            }
        });
    }
}
326
787
<reponame>mlouward/rust
{
  "blurb": "Given a person's allergy score, determine whether or not they're allergic to a given item, and their full list of allergies.",
  "authors": [
    "EduardoBautista"
  ],
  "contributors": [
    "ashleygwilliams",
    "coriolinus",
    "cwhakes",
    "EduardoBautista",
    "efx",
    "ErikSchierboom",
    "IanWhitney",
    "kytrinyx",
    "lutostag",
    "mkantor",
    "navossoc",
    "nfiles",
    "petertseng",
    "rofrol",
    "stringparser",
    "taravancil",
    "xakon",
    "ZapAnton"
  ],
  "files": {
    "solution": [
      "src/lib.rs"
    ],
    "test": [
      "tests/allergies.rs"
    ],
    "example": [
      ".meta/example.rs"
    ]
  },
  "source": "Jumpstart Lab Warm-up",
  "source_url": "http://jumpstartlab.com"
}
372
7,739
<gh_stars>1000+
import pytest

from ludwig.modules.mlp_mixer_modules import MixerBlock, MLP, MLPMixer

from .test_utils import assert_output_shapes


@pytest.mark.parametrize("in_features,hidden_size,out_features", [(3, 8, 8), (8, 64, 32)])
def test_mlp(in_features: int, hidden_size: int, out_features: int):
    assert_output_shapes(module=MLP(in_features, hidden_size, out_features), input_shape=(in_features,))


@pytest.mark.parametrize("embed_size,n_patches,token_dim,channel_dim", [(512, 49, 2048, 256)])
def test_mixer_block(
    embed_size: int,
    n_patches: int,
    token_dim: int,
    channel_dim: int,
):
    assert_output_shapes(
        module=MixerBlock(embed_size, n_patches, token_dim, channel_dim), input_shape=(n_patches, embed_size)
    )


@pytest.mark.parametrize("img_height,img_width,in_channels", [(224, 224, 3)])
def test_mlp_mixer(img_height: int, img_width: int, in_channels: int):
    assert_output_shapes(module=MLPMixer(img_height, img_width, in_channels), input_shape=(3, img_height, img_width))
416
759
<filename>rome-modules/src/main/java/com/rometools/modules/mediarss/io/MediaModuleGenerator.java<gh_stars>100-1000 /* * Copyright 2006 <NAME>, openvision.tv * * This code is currently released under the Mozilla Public License. * http://www.mozilla.org/MPL/ * * Alternately you may apply the terms of the Apache Software License * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.rometools.modules.mediarss.io; import java.net.URL; import java.util.HashSet; import java.util.Set; import org.jdom2.Element; import org.jdom2.Namespace; import com.rometools.modules.georss.GMLGenerator; import com.rometools.modules.mediarss.MediaEntryModule; import com.rometools.modules.mediarss.MediaModule; import com.rometools.modules.mediarss.types.Category; import com.rometools.modules.mediarss.types.Credit; import com.rometools.modules.mediarss.types.Embed.Param; import com.rometools.modules.mediarss.types.License; import com.rometools.modules.mediarss.types.Location; import com.rometools.modules.mediarss.types.MediaContent; import com.rometools.modules.mediarss.types.MediaGroup; import com.rometools.modules.mediarss.types.Metadata; import com.rometools.modules.mediarss.types.PeerLink; import com.rometools.modules.mediarss.types.PlayerReference; import com.rometools.modules.mediarss.types.Price; import com.rometools.modules.mediarss.types.Rating; import com.rometools.modules.mediarss.types.Restriction; import com.rometools.modules.mediarss.types.Scene; import com.rometools.modules.mediarss.types.SubTitle; import com.rometools.modules.mediarss.types.Tag; import com.rometools.modules.mediarss.types.Text; import com.rometools.modules.mediarss.types.Thumbnail; import com.rometools.modules.mediarss.types.UrlReference; import com.rometools.rome.feed.module.Module; import com.rometools.rome.io.ModuleGenerator; /** * Generator for MediaRSS module. 
* */ public class MediaModuleGenerator implements ModuleGenerator { private static final Namespace NS = Namespace.getNamespace("media", MediaModule.URI); private static final Set<Namespace> NAMESPACES = new HashSet<Namespace>(); static { NAMESPACES.add(NS); } @Override public String getNamespaceUri() { return MediaModule.URI; } @Override public Set<Namespace> getNamespaces() { return NAMESPACES; } @Override public void generate(final Module module, final Element element) { if (module instanceof MediaModule) { final MediaModule m = (MediaModule) module; generateMetadata(m.getMetadata(), element); generatePlayer(m.getPlayer(), element); } if (module instanceof MediaEntryModule) { final MediaEntryModule m = (MediaEntryModule) module; for (final MediaGroup element2 : m.getMediaGroups()) { generateGroup(element2, element); } for (final MediaContent element2 : m.getMediaContents()) { generateContent(element2, element); } } } public void generateContent(final MediaContent c, final Element e) { final Element mc = new Element("content", NS); addNotNullAttribute(mc, "medium", c.getMedium()); addNotNullAttribute(mc, "channels", c.getAudioChannels()); addNotNullAttribute(mc, "bitrate", c.getBitrate()); addNotNullAttribute(mc, "duration", c.getDuration()); addNotNullAttribute(mc, "expression", c.getExpression()); addNotNullAttribute(mc, "fileSize", c.getFileSize()); addNotNullAttribute(mc, "framerate", c.getFramerate()); addNotNullAttribute(mc, "height", c.getHeight()); addNotNullAttribute(mc, "lang", c.getLanguage()); addNotNullAttribute(mc, "samplingrate", c.getSamplingrate()); addNotNullAttribute(mc, "type", c.getType()); addNotNullAttribute(mc, "width", c.getWidth()); if (c.isDefaultContent()) { addNotNullAttribute(mc, "isDefault", "true"); } if (c.getReference() instanceof UrlReference) { addNotNullAttribute(mc, "url", c.getReference()); generatePlayer(c.getPlayer(), mc); } else { generatePlayer(c.getPlayer(), mc); } generateMetadata(c.getMetadata(), mc); e.addContent(mc); } public void generateGroup(final MediaGroup g, final Element e) { final Element t = new Element("group", NS); final MediaContent[] c = g.getContents(); for (final MediaContent element : c) { generateContent(element, t); } generateMetadata(g.getMetadata(), t); e.addContent(t); } public void generateMetadata(final Metadata m, final Element e) { if (m == null) { return; } final Category[] cats = m.getCategories(); for (final Category cat : cats) { final Element c = generateSimpleElement("category", cat.getValue()); addNotNullAttribute(c, "scheme", cat.getScheme()); addNotNullAttribute(c, "label", cat.getLabel()); e.addContent(c); } final Element copyright = addNotNullElement(e, "copyright", m.getCopyright()); addNotNullAttribute(copyright, "url", m.getCopyrightUrl()); final Credit[] creds = m.getCredits(); for (final Credit cred : creds) { final Element c = generateSimpleElement("credit", cred.getName()); addNotNullAttribute(c, "role", cred.getRole()); addNotNullAttribute(c, "scheme", cred.getScheme()); e.addContent(c); } final Element desc = addNotNullElement(e, "description", m.getDescription()); addNotNullAttribute(desc, "type", m.getDescriptionType()); if (m.getHash() != null) { final Element hash = addNotNullElement(e, "hash", m.getHash().getValue()); addNotNullAttribute(hash, "algo", m.getHash().getAlgorithm()); } final String[] keywords = m.getKeywords(); if (keywords.length > 0) { String keyword = keywords[0]; for (int i = 1; i < keywords.length; i++) { keyword += ", " + keywords[i]; } addNotNullElement(e, "keywords", 
keyword); } final Rating[] rats = m.getRatings(); for (final Rating rat2 : rats) { final Element rat = addNotNullElement(e, "rating", rat2.getValue()); addNotNullAttribute(rat, "scheme", rat2.getScheme()); if (rat2.equals(Rating.ADULT)) { addNotNullElement(e, "adult", "true"); } else if (rat2.equals(Rating.NONADULT)) { addNotNullElement(e, "adult", "false"); } } final Text[] text = m.getText(); for (final Text element : text) { final Element t = addNotNullElement(e, "text", element.getValue()); addNotNullAttribute(t, "type", element.getType()); addNotNullAttribute(t, "start", element.getStart()); addNotNullAttribute(t, "end", element.getEnd()); } final Element title = addNotNullElement(e, "title", m.getTitle()); addNotNullAttribute(title, "type", m.getTitleType()); generateBackLinks(m, e); generateComments(m, e); generateCommunity(m, e); generateEmbed(m, e); generateLicenses(m, e); generateLocations(m, e); generatePeerLinks(m, e); generatePrices(m, e); generateResponses(m, e); final Restriction[] r = m.getRestrictions(); for (final Restriction element : r) { final Element res = addNotNullElement(e, "restriction", element.getValue()); addNotNullAttribute(res, "type", element.getType()); addNotNullAttribute(res, "relationship", element.getRelationship()); } if (m.getRights() != null) { final Element rights = new Element("rights", NS); rights.setAttribute("status", m.getRights().name()); e.addContent(rights); } generateScenes(m, e); generateStatus(m, e); generateSubTitles(m, e); generateThumbails(m, e); } /** * Generation of thumbnail tags. * * @param m source * @param e element to attach new element to */ private void generateThumbails(final Metadata m, final Element e) { for (final Thumbnail thumb : m.getThumbnail()) { final Element t = new Element("thumbnail", NS); addNotNullAttribute(t, "url", thumb.getUrl()); addNotNullAttribute(t, "width", thumb.getWidth()); addNotNullAttribute(t, "height", thumb.getHeight()); addNotNullAttribute(t, "time", thumb.getTime()); e.addContent(t); } } /** * Generation of backLinks tag. * * @param m source * @param e element to attach new element to */ private void generateBackLinks(final Metadata m, final Element e) { final Element backLinksElements = new Element("backLinks", NS); for (final URL backLink : m.getBackLinks()) { addNotNullElement(backLinksElements, "backLink", backLink); } if (!backLinksElements.getChildren().isEmpty()) { e.addContent(backLinksElements); } } /** * Generation of comments tag. * * @param m source * @param e element to attach new element to */ private void generateComments(final Metadata m, final Element e) { final Element commentsElements = new Element("comments", NS); for (final String comment : m.getComments()) { addNotNullElement(commentsElements, "comment", comment); } if (!commentsElements.getChildren().isEmpty()) { e.addContent(commentsElements); } } /** * Generation of community tag. 
* * @param m source * @param e element to attach new element to */ private void generateCommunity(final Metadata m, final Element e) { if (m.getCommunity() == null) { return; } final Element communityElement = new Element("community", NS); if (m.getCommunity().getStarRating() != null) { final Element starRatingElement = new Element("starRating", NS); addNotNullAttribute(starRatingElement, "average", m.getCommunity().getStarRating().getAverage()); addNotNullAttribute(starRatingElement, "count", m.getCommunity().getStarRating().getCount()); addNotNullAttribute(starRatingElement, "min", m.getCommunity().getStarRating().getMin()); addNotNullAttribute(starRatingElement, "max", m.getCommunity().getStarRating().getMax()); if (starRatingElement.hasAttributes()) { communityElement.addContent(starRatingElement); } } if (m.getCommunity().getStatistics() != null) { final Element statisticsElement = new Element("statistics", NS); addNotNullAttribute(statisticsElement, "views", m.getCommunity().getStatistics().getViews()); addNotNullAttribute(statisticsElement, "favorites", m.getCommunity().getStatistics().getFavorites()); if (statisticsElement.hasAttributes()) { communityElement.addContent(statisticsElement); } } if (m.getCommunity().getTags() != null && !m.getCommunity().getTags().isEmpty()) { final Element tagsElement = new Element("tags", NS); for (final Tag tag : m.getCommunity().getTags()) { if (!tagsElement.getTextTrim().isEmpty()) { tagsElement.addContent(", "); } if (tag.getWeight() == null) { tagsElement.addContent(tag.getName()); } else { tagsElement.addContent(tag.getName()); tagsElement.addContent(":"); tagsElement.addContent(String.valueOf(tag.getWeight())); } } if (!tagsElement.getTextTrim().isEmpty()) { communityElement.addContent(tagsElement); } } if (!communityElement.getChildren().isEmpty()) { e.addContent(communityElement); } } /** * Generation of embed tag. * * @param m source * @param e element to attach new element to */ private void generateEmbed(final Metadata m, final Element e) { if (m.getEmbed() == null) { return; } final Element embedElement = new Element("embed", NS); addNotNullAttribute(embedElement, "url", m.getEmbed().getUrl()); addNotNullAttribute(embedElement, "width", m.getEmbed().getWidth()); addNotNullAttribute(embedElement, "height", m.getEmbed().getHeight()); for (final Param param : m.getEmbed().getParams()) { final Element paramElement = addNotNullElement(embedElement, "param", param.getValue()); if (paramElement != null) { addNotNullAttribute(paramElement, "name", param.getName()); } } if (embedElement.hasAttributes() || !embedElement.getChildren().isEmpty()) { e.addContent(embedElement); } } /** * Generation of scenes tag. * * @param m source * @param e element to attach new element to */ private void generateScenes(final Metadata m, final Element e) { final Element scenesElement = new Element("scenes", NS); for (final Scene scene : m.getScenes()) { final Element sceneElement = new Element("scene", NS); addNotNullElement(sceneElement, "sceneTitle", scene.getTitle()); addNotNullElement(sceneElement, "sceneDescription", scene.getDescription()); addNotNullElement(sceneElement, "sceneStartTime", scene.getStartTime()); addNotNullElement(sceneElement, "sceneEndTime", scene.getEndTime()); if (!sceneElement.getChildren().isEmpty()) { scenesElement.addContent(sceneElement); } } if (!scenesElement.getChildren().isEmpty()) { e.addContent(scenesElement); } } /** * Generation of location tags. 
* * @param m source * @param e element to attach new element to */ private void generateLocations(final Metadata m, final Element e) { final GMLGenerator geoRssGenerator = new GMLGenerator(); for (final Location location : m.getLocations()) { final Element locationElement = new Element("location", NS); addNotNullAttribute(locationElement, "description", location.getDescription()); addNotNullAttribute(locationElement, "start", location.getStart()); addNotNullAttribute(locationElement, "end", location.getEnd()); if (location.getGeoRss() != null) { geoRssGenerator.generate(location.getGeoRss(), locationElement); } if (locationElement.hasAttributes() || !locationElement.getChildren().isEmpty()) { e.addContent(locationElement); } } } /** * Generation of peerLink tags. * * @param m source * @param e element to attach new element to */ private void generatePeerLinks(final Metadata m, final Element e) { for (final PeerLink peerLink : m.getPeerLinks()) { final Element peerLinkElement = new Element("peerLink", NS); addNotNullAttribute(peerLinkElement, "type", peerLink.getType()); addNotNullAttribute(peerLinkElement, "href", peerLink.getHref()); if (peerLinkElement.hasAttributes()) { e.addContent(peerLinkElement); } } } /** * Generation of subTitle tags. * * @param m source * @param e element to attach new element to */ private void generateSubTitles(final Metadata m, final Element e) { for (final SubTitle subTitle : m.getSubTitles()) { final Element subTitleElement = new Element("subTitle", NS); addNotNullAttribute(subTitleElement, "type", subTitle.getType()); addNotNullAttribute(subTitleElement, "lang", subTitle.getLang()); addNotNullAttribute(subTitleElement, "href", subTitle.getHref()); if (subTitleElement.hasAttributes()) { e.addContent(subTitleElement); } } } /** * Generation of license tags. * * @param m source * @param e element to attach new element to */ private void generateLicenses(final Metadata m, final Element e) { for (final License license : m.getLicenses()) { final Element licenseElement = new Element("license", NS); addNotNullAttribute(licenseElement, "type", license.getType()); addNotNullAttribute(licenseElement, "href", license.getHref()); if (license.getValue() != null) { licenseElement.addContent(license.getValue()); } if (licenseElement.hasAttributes() || !licenseElement.getTextTrim().isEmpty()) { e.addContent(licenseElement); } } } /** * Generation of backLinks tag. * * @param m source * @param e element to attach new element to */ private void generatePrices(final Metadata m, final Element e) { for (final Price price : m.getPrices()) { if (price == null) { continue; } final Element priceElement = new Element("price", NS); if (price.getType() != null) { priceElement.setAttribute("type", price.getType().name().toLowerCase()); } addNotNullAttribute(priceElement, "info", price.getInfo()); addNotNullAttribute(priceElement, "price", price.getPrice()); addNotNullAttribute(priceElement, "currency", price.getCurrency()); if (priceElement.hasAttributes()) { e.addContent(priceElement); } } } /** * Generation of responses tag. * * @param m source * @param e element to attach new element to */ private void generateResponses(final Metadata m, final Element e) { if (m.getResponses() == null || m.getResponses().length == 0) { return; } final Element responsesElements = new Element("responses", NS); for (final String response : m.getResponses()) { addNotNullElement(responsesElements, "response", response); } e.addContent(responsesElements); } /** * Generation of status tag. 
* * @param m source * @param e element to attach new element to */ private void generateStatus(final Metadata m, final Element e) { if (m.getStatus() == null) { return; } final Element statusElement = new Element("status", NS); if (m.getStatus().getState() != null) { statusElement.setAttribute("state", m.getStatus().getState().name()); } addNotNullAttribute(statusElement, "reason", m.getStatus().getReason()); if (statusElement.hasAttributes()) { e.addContent(statusElement); } } public void generatePlayer(final PlayerReference p, final Element e) { if (p == null) { return; } final Element t = new Element("player", NS); addNotNullAttribute(t, "url", p.getUrl()); addNotNullAttribute(t, "width", p.getWidth()); addNotNullAttribute(t, "height", p.getHeight()); e.addContent(t); } protected void addNotNullAttribute(final Element target, final String name, final Object value) { if (target == null || value == null) { return; } else { target.setAttribute(name, value.toString()); } } protected Element addNotNullElement(final Element target, final String name, final Object value) { if (value == null) { return null; } else { final Element e = generateSimpleElement(name, value.toString()); target.addContent(e); return e; } } protected Element generateSimpleElement(final String name, final String value) { final Element element = new Element(name, NS); element.addContent(value); return element; } }
8,725
611
# SPDX-License-Identifier: BSD-3-Clause # Copyright Contributors to the OpenColorIO Project. import copy, unittest, os, sys import PyOpenColorIO as OCIO import inspect class TransformsTest(unittest.TestCase): def all_transforms_as_group(self): # Default arguments for Transforms that can't be instantiated without arguments. default_args = { OCIO.FixedFunctionTransform: { 'style': OCIO.FIXED_FUNCTION_RGB_TO_HSV }, OCIO.LogCameraTransform: { 'linSideBreak': [0.5, 0.5, 0.5] } } allTransformsAsGroup = OCIO.GroupTransform() # Search for all transform types in order to handle future transforms for n, c in inspect.getmembers(OCIO): if hasattr(c, 'getTransformType'): try: # Attempt to construct each Transform subclass, raising exception in order to # filter the parent OCIO.Transform class if c in default_args: # Plug in kwargs if needed allTransformsAsGroup.appendTransform(c(**default_args[c])) else: allTransformsAsGroup.appendTransform(c()) except TypeError as e: # Ensure we only catch and filter for this specific error self.assertEqual( str(e), 'PyOpenColorIO.Transform: No constructor defined!', 'Unintended Error Raised: {0}'.format(e) ) return allTransformsAsGroup def test_copy(self): """ Test the deepcopy() method. """ for transform in self.all_transforms_as_group(): other = copy.deepcopy(transform) self.assertFalse(other is transform) self.assertEquals(other.getTransformType(), transform.getTransformType()) self.assertEquals(other.getDirection(), transform.getDirection()) # Not all OCIO.Transform have equals methods if hasattr(transform, 'equals'): self.assertTrue(other.equals(transform)) other.setDirection(OCIO.TRANSFORM_DIR_INVERSE) self.assertNotEquals(other.getDirection(), transform.getDirection()) def test_binding_group_polymorphism(self): """ Tests polymorphism issue where transforms are cast as parent class when using GroupTransforms. Flagged in https://github.com/AcademySoftwareFoundation/OpenColorIO/issues/1211 """ for transform in self.all_transforms_as_group(): # Ensure no transforms have been cast as parent transform self.assertNotEqual( type(transform), OCIO.Transform, """Transform has unintentionally been cast as parent class! transform.getTransformType(): {0} type(transform): {1} Are there pybind polymorphic_type_hooks in src/bindings/PyOpenColorIO.h for this transform? """.format(transform.getTransformType(), type(transform)) )
1,465
8,027
<gh_stars>1000+ /* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.event.listener.util; import java.util.Optional; /** Progress information */ public class ProgressEstimation { public static final ProgressEstimation UNKNOWN = new ProgressEstimation(Optional.empty(), Optional.empty()); /** A number between 0 and 1 estimating a progress */ private final Optional<Double> progress; /** Number of something processed (eg number of files parsed) */ private final Optional<Integer> number; public ProgressEstimation(Optional<Double> progress, Optional<Integer> number) { this.progress = progress; this.number = number; } public Optional<Double> getProgress() { return progress; } public Optional<Integer> getNumber() { return number; } }
371
328
# Copyright 2022 The BladeDISC Authors. All rights reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os # Enable stitch fusion optimization. os.environ["DISC_ENABLE_STITCH"] = "true" from transformers import T5Tokenizer, T5Model, T5Config import torch import time import ctypes import torch_blade import torch_blade.tensorrt _cudart = ctypes.CDLL('libcudart.so') def cu_prof_start(): ret = _cudart.cudaProfilerStart() if ret != 0: raise Exception('cudaProfilerStart() returned %d' % ret) def cu_prof_stop(): ret = _cudart.cudaProfilerStop() if ret != 0: raise Exception('cudaProfilerStop() returned %d' % ret) def trace_model(model, inputs, amp: bool): with torch.cuda.amp.autocast(amp), torch.no_grad(): traced_model = torch.jit.trace(model, inputs, strict=False) torch._C._jit_pass_inline(traced_model.graph) return traced_model def evaluate_torch(model, inputs): # warmup for i in range(20): model(*tuple(inputs)) iters = 100 tic = time.time() for i in range(iters): model(*tuple(inputs)) avg_time = (time.time() - tic) / iters print("average time in {} iterations: {} seconds".format(iters, avg_time)) # profile start cu_prof_start() model(*tuple(inputs)) cu_prof_stop() # profile end def disc_optimize(model, inputs, out_file: str): torch_config = torch_blade.config.Config() torch_config.enable_mlir_amp = False # disable mix-precision torch_config.enable_force_to_cuda = True traced_model = torch.jit.trace(model.cuda(), inputs, strict=False).cuda().eval() torch._C._jit_pass_inline(traced_model._c.forward.graph) torch._C._jit_pass_remove_dropout(traced_model._c) with torch.no_grad(), torch_config: # BladeDISC torch_blade optimize will return an optimized TorchScript optimized_ts = torch_blade.optimize(traced_model, allow_tracing=True, model_inputs=tuple(inputs)) torch.jit.save(optimized_ts, out_file) def run(): tokenizer = T5Tokenizer.from_pretrained("t5-base") input_ids = tokenizer( "Studies have been shown that owning a dog is good for you", return_tensors="pt").input_ids # Batch size 1 input_ids = input_ids.cuda() mask = torch.ones_like(input_ids) decoder_input_ids = tokenizer( "Studies show that", return_tensors="pt").input_ids # Batch size 1 decoder_input_ids = decoder_input_ids.cuda() inputs = (input_ids, mask, decoder_input_ids) model = T5Model.from_pretrained("t5-base", torchscript=True).eval().cuda() traced_model_amp = trace_model(model, inputs, True).eval().cuda() # Run naive torch. print("Naive PyTorch.") model = traced_model_amp evaluate_torch(model, inputs) # Run BladeDISC optimization. print("BladeDISC Optimization.") disc_optimize(traced_model_amp, inputs, 't5-base_amp.disc.pt') model = torch.jit.load('t5-base_amp.disc.pt').cuda().eval() evaluate_torch(model, inputs) if __name__ == '__main__': run()
1,423
316
package pv.com.pvcloudgo.model.msg; import java.util.List; import pv.com.pvcloudgo.model.base.BaseRespMsg; import pv.com.pvcloudgo.model.bean.Pager; import pv.com.pvcloudgo.model.bean.ProductPackage; import pv.com.pvcloudgo.model.bean.ProductPackageType; /** * Created by Administrator on 2017/1/12. */ public class GetTcMsg extends BaseRespMsg { public Result results; public Result getResults() { return results; } public class Result { String canDingjin; ProductPackageType productPackageType; List<ProductPackage> list; Pager pager; public ProductPackageType getProductPackageType() { return productPackageType; } public List<ProductPackage> getList() { return list; } public Pager getPager() { return pager; } public String getCanDingjin() { return canDingjin; } } }
410
545
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "BloomFilter.hh" #include "orc/OrcFile.hh" #include "wrap/gtest-wrapper.h" namespace orc { TEST(TestBloomFilter, testBitSetEqual) { BitSet bitSet64_1(64), bitSet64_2(64), bitSet32(128); EXPECT_TRUE(bitSet64_1 == bitSet64_2); EXPECT_FALSE(bitSet64_1 == bitSet32); bitSet64_1.set(6U); bitSet64_1.set(16U); bitSet64_1.set(26U); bitSet64_2.set(6U); bitSet64_2.set(16U); bitSet64_2.set(26U); EXPECT_TRUE(bitSet64_1 == bitSet64_2); EXPECT_EQ(bitSet64_1.get(6U), bitSet64_2.get(6U)); EXPECT_EQ(bitSet64_1.get(16U), bitSet64_2.get(16U)); EXPECT_EQ(bitSet64_1.get(26U), bitSet64_2.get(26U)); bitSet64_1.set(36U); bitSet64_2.set(46U); EXPECT_FALSE(bitSet64_1 == bitSet64_2); EXPECT_TRUE(bitSet64_1.get(36U)); EXPECT_TRUE(bitSet64_2.get(46U)); bitSet64_1.clear(); bitSet64_2.clear(); EXPECT_TRUE(bitSet64_1 == bitSet64_2); } // ported from Java ORC TEST(TestBloomFilter, testSetGetBitSet) { BitSet bitset(128); // set every 9th bit for a rotating pattern for (uint64_t l = 0; l < 8; ++l) { bitset.set(l * 9); } // set every non-9th bit for (uint64_t l = 8; l < 16; ++l) { for(uint64_t b = 0; b < 8; ++b) { if (b != l - 8) { bitset.set(l * 8 + b); } } } for(uint64_t b = 0; b < 64; ++b) { EXPECT_EQ(b % 9 == 0, bitset.get(b)); } for(uint64_t b = 64; b < 128; ++b) { EXPECT_EQ((b % 8) != (b - 64) / 8, bitset.get(b)); } // test that the longs are mapped correctly const uint64_t * longs = bitset.getData(); EXPECT_EQ(128, bitset.bitSize()); EXPECT_EQ(0x8040201008040201L, longs[0]); EXPECT_EQ(~0x8040201008040201L, longs[1]); } // Same test as TestBloomFilter#testLongHash() in Java codes. Make sure the hash values // are consistent between the Java client and C++ client. // TODO(ORC-1025): Add exhaustive test on all numbers. 
TEST(TestBloomFilter, testLongHash) { EXPECT_EQ(0, orc::getLongHash(0)); EXPECT_EQ(6614246905173314819, orc::getLongHash(-1)); EXPECT_EQ(-5218250166726157773, orc::getLongHash(-2)); EXPECT_EQ(1396019780946710816, orc::getLongHash(-3)); EXPECT_EQ(3691278333958578070, orc::getLongHash(-9223372036854775805)); EXPECT_EQ(-1192099642781211952, orc::getLongHash(-9223372036854775806)); EXPECT_EQ(-9102499068535824902, orc::getLongHash(-9223372036854775807)); EXPECT_EQ(1499534499340523007, orc::getLongHash(790302201)); EXPECT_EQ(-5108695154500810163, orc::getLongHash(790302202)); EXPECT_EQ(-2450623810987162260, orc::getLongHash(790302203)); EXPECT_EQ(-1097054448615658549, orc::getLongHash(18000000000)); EXPECT_EQ(-4986173376161118712, orc::getLongHash(9223372036064673413)); EXPECT_EQ(3785699328822078862, orc::getLongHash(9223372036064673414)); EXPECT_EQ(294188322706112357, orc::getLongHash(9223372036064673415)); } #define CheckBitSet(bf, p1, p2, p3, p4, p5) \ EXPECT_TRUE(bf.mBitSet->get(p1)); \ EXPECT_TRUE(bf.mBitSet->get(p2)); \ EXPECT_TRUE(bf.mBitSet->get(p3)); \ EXPECT_TRUE(bf.mBitSet->get(p4)); \ EXPECT_TRUE(bf.mBitSet->get(p5)) // Same test as TestBloomFilter#testBasicOperations() in Java codes. We also // verifies the bitSet positions that are set, to make sure both the Java and C++ codes // hash the same value into the same position. TEST(TestBloomFilter, testBloomFilterBasicOperations) { BloomFilterImpl bloomFilter(128); // test integers bloomFilter.reset(); EXPECT_FALSE(bloomFilter.testLong(1)); EXPECT_FALSE(bloomFilter.testLong(11)); EXPECT_FALSE(bloomFilter.testLong(111)); EXPECT_FALSE(bloomFilter.testLong(1111)); EXPECT_FALSE(bloomFilter.testLong(0)); EXPECT_FALSE(bloomFilter.testLong(-1)); EXPECT_FALSE(bloomFilter.testLong(-11)); EXPECT_FALSE(bloomFilter.testLong(-111)); EXPECT_FALSE(bloomFilter.testLong(-1111)); bloomFilter.addLong(1); CheckBitSet(bloomFilter, 567, 288, 246, 306, 228); bloomFilter.addLong(11); CheckBitSet(bloomFilter, 228, 285, 342, 399, 456); bloomFilter.addLong(111); CheckBitSet(bloomFilter, 802, 630, 458, 545, 717); bloomFilter.addLong(1111); CheckBitSet(bloomFilter, 826, 526, 40, 480, 86); bloomFilter.addLong(0); CheckBitSet(bloomFilter, 0, 0, 0, 0, 0); bloomFilter.addLong(-1); CheckBitSet(bloomFilter, 120, 308, 335, 108, 535); bloomFilter.addLong(-11); CheckBitSet(bloomFilter, 323, 685, 215, 577, 107); bloomFilter.addLong(-111); CheckBitSet(bloomFilter, 357, 318, 279, 15, 54); bloomFilter.addLong(-1111); CheckBitSet(bloomFilter, 572, 680, 818, 434, 232); EXPECT_TRUE(bloomFilter.testLong(1)); EXPECT_TRUE(bloomFilter.testLong(11)); EXPECT_TRUE(bloomFilter.testLong(111)); EXPECT_TRUE(bloomFilter.testLong(1111)); EXPECT_TRUE(bloomFilter.testLong(0)); EXPECT_TRUE(bloomFilter.testLong(-1)); EXPECT_TRUE(bloomFilter.testLong(-11)); EXPECT_TRUE(bloomFilter.testLong(-111)); EXPECT_TRUE(bloomFilter.testLong(-1111)); // test doubles bloomFilter.reset(); EXPECT_FALSE(bloomFilter.testDouble(1.1)); EXPECT_FALSE(bloomFilter.testDouble(11.11)); EXPECT_FALSE(bloomFilter.testDouble(111.111)); EXPECT_FALSE(bloomFilter.testDouble(1111.1111)); EXPECT_FALSE(bloomFilter.testDouble(0.0)); EXPECT_FALSE(bloomFilter.testDouble(-1.1)); EXPECT_FALSE(bloomFilter.testDouble(-11.11)); EXPECT_FALSE(bloomFilter.testDouble(-111.111)); EXPECT_FALSE(bloomFilter.testDouble(-1111.1111)); bloomFilter.addDouble(1.1); CheckBitSet(bloomFilter, 522, 692, 12, 370, 753); bloomFilter.addDouble(11.11); CheckBitSet(bloomFilter, 210, 188, 89, 720, 389); bloomFilter.addDouble(111.111); CheckBitSet(bloomFilter, 831, 
252, 583, 500, 335); bloomFilter.addDouble(1111.1111); CheckBitSet(bloomFilter, 725, 175, 374, 92, 642); bloomFilter.addDouble(0.0); CheckBitSet(bloomFilter, 0, 0, 0, 0, 0); bloomFilter.addDouble(-1.1); CheckBitSet(bloomFilter, 636, 163, 565, 206, 679); bloomFilter.addDouble(-11.11); CheckBitSet(bloomFilter, 473, 192, 743, 462, 181); bloomFilter.addDouble(-111.111); CheckBitSet(bloomFilter, 167, 152, 472, 295, 24); bloomFilter.addDouble(-1111.1111); CheckBitSet(bloomFilter, 308, 346, 384, 422, 371); EXPECT_TRUE(bloomFilter.testDouble(1.1)); EXPECT_TRUE(bloomFilter.testDouble(11.11)); EXPECT_TRUE(bloomFilter.testDouble(111.111)); EXPECT_TRUE(bloomFilter.testDouble(1111.1111)); EXPECT_TRUE(bloomFilter.testDouble(0.0)); EXPECT_TRUE(bloomFilter.testDouble(-1.1)); EXPECT_TRUE(bloomFilter.testDouble(-11.11)); EXPECT_TRUE(bloomFilter.testDouble(-111.111)); EXPECT_TRUE(bloomFilter.testDouble(-1111.1111)); // test strings bloomFilter.reset(); const char * emptyStr = ""; const char * enStr = "english"; const char * cnStr = "中国字"; EXPECT_FALSE(bloomFilter.testBytes(emptyStr, static_cast<int64_t>(strlen(emptyStr)))); EXPECT_FALSE(bloomFilter.testBytes(enStr, static_cast<int64_t>(strlen(enStr)))); EXPECT_FALSE(bloomFilter.testBytes(cnStr, static_cast<int64_t>(strlen(cnStr)))); bloomFilter.addBytes(emptyStr, static_cast<int64_t>(strlen(emptyStr))); CheckBitSet(bloomFilter, 656, 807, 480, 151, 304); bloomFilter.addBytes(enStr, static_cast<int64_t>(strlen(enStr))); CheckBitSet(bloomFilter, 576, 221, 68, 729, 392); bloomFilter.addBytes(cnStr, static_cast<int64_t>(strlen(cnStr))); CheckBitSet(bloomFilter, 602, 636, 44, 362, 318); EXPECT_TRUE(bloomFilter.testBytes(emptyStr, static_cast<int64_t>(strlen(emptyStr)))); EXPECT_TRUE(bloomFilter.testBytes(enStr, static_cast<int64_t>(strlen(enStr)))); EXPECT_TRUE(bloomFilter.testBytes(cnStr, static_cast<int64_t>(strlen(cnStr)))); } TEST(TestBloomFilter, testBloomFilterSerialization) { BloomFilterImpl emptyFilter1(128), emptyFilter2(256); EXPECT_FALSE(emptyFilter1 == emptyFilter2); BloomFilterImpl emptyFilter3(128, 0.05), emptyFilter4(128, 0.01); EXPECT_FALSE(emptyFilter3 == emptyFilter4); BloomFilterImpl srcBloomFilter(64); srcBloomFilter.addLong(1); srcBloomFilter.addLong(11); srcBloomFilter.addLong(111); srcBloomFilter.addLong(1111); srcBloomFilter.addLong(0); srcBloomFilter.addLong(-1); srcBloomFilter.addLong(-11); srcBloomFilter.addLong(-111); srcBloomFilter.addLong(-1111); proto::BloomFilter pbBloomFilter; proto::ColumnEncoding encoding; encoding.set_bloomencoding(1); // serialize BloomFilterUTF8Utils::serialize(srcBloomFilter, pbBloomFilter); // deserialize std::unique_ptr<BloomFilter> dstBloomFilter = BloomFilterUTF8Utils::deserialize( proto::Stream_Kind_BLOOM_FILTER_UTF8, encoding, pbBloomFilter); EXPECT_TRUE(srcBloomFilter == dynamic_cast<BloomFilterImpl&>(*dstBloomFilter)); EXPECT_TRUE(dstBloomFilter->testLong(1)); EXPECT_TRUE(dstBloomFilter->testLong(11)); EXPECT_TRUE(dstBloomFilter->testLong(111)); EXPECT_TRUE(dstBloomFilter->testLong(1111)); EXPECT_TRUE(dstBloomFilter->testLong(0)); EXPECT_TRUE(dstBloomFilter->testLong(-1)); EXPECT_TRUE(dstBloomFilter->testLong(-11)); EXPECT_TRUE(dstBloomFilter->testLong(-111)); EXPECT_TRUE(dstBloomFilter->testLong(-1111)); } }
4,498
1,006
<filename>libs/libc/grp/lib_getgrnamr.c /**************************************************************************** * libs/libc/grp/lib_getgrnamr.c * * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> #include <string.h> #include <grp.h> #include "grp/lib_grp.h" /**************************************************************************** * Public Functions ****************************************************************************/ /**************************************************************************** * Name: getgrnam_r * * Description: * The getgrnam_r() function searches the group database for an entry with * a matching name and stores the retrieved group structure in the space * pointed to by grp. * * Input Parameters: * name - The name of the group to return a group structure for. * grp - Pointer to the space to store the retrieved group structure in. * buf - The string fields pointed to by the group struct are stored here. * buflen - The length of buf in bytes. * result - Pointer to the resulting group struct, or NULL in case of fail. * * Returned Value: * On success getgrnam_r returns 0 and sets *result to grp. If no match * is found, 0 is returned and *result is set to NULL. In case of failure * an error number is returned. * ****************************************************************************/ int getgrnam_r(FAR const char *name, FAR struct group *grp, FAR char *buf, size_t buflen, FAR struct group **result) { #ifdef CONFIG_LIBC_GROUP_FILE int ret; ret = grp_findby_name(name, grp, buf, buflen); if (ret != 1) { *result = NULL; return ret < 0 ? -ret : 0; } *result = grp; return 0; #else if (strcmp(name, ROOT_NAME)) { /* The only known group is 'root', which has a gid of 0. Thus, report * back that no match was found. */ *result = NULL; return 0; } return getgrbuf_r(ROOT_GID, ROOT_NAME, ROOT_PASSWD, grp, buf, buflen, result); #endif }
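/* Illustrative usage sketch (not part of the NuttX file above): shows how a
 * caller is expected to drive getgrnam_r() with its own result buffer, per
 * the doc comment in the file. The group name passed in, the helper name
 * print_gid_of(), and the 256-byte buffer size are assumptions made only
 * for this example.
 */

#include <grp.h>
#include <stdio.h>

static int print_gid_of(const char *name)
{
  struct group grp;
  struct group *result;
  char buf[256];

  /* Returns 0 on success; a NULL *result means "no matching group". */

  int ret = getgrnam_r(name, &grp, buf, sizeof(buf), &result);
  if (ret != 0)
    {
      return ret;              /* An error number, as documented above */
    }

  if (result == NULL)
    {
      printf("no group named %s\n", name);
      return 0;
    }

  printf("%s has gid %d\n", result->gr_name, (int)result->gr_gid);
  return 0;
}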
874
335
{ "word": "Baffle", "definitions": [ "A device used to restrain the flow of a fluid, gas, etc. or to prevent the spreading of sound or light in a particular direction." ], "parts-of-speech": "Noun" }
81
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_scripting.hxx" #include "basmodnode.hxx" #include "basmethnode.hxx" #include <com/sun/star/script/browse/BrowseNodeTypes.hpp> #include <vos/mutex.hxx> #include <vcl/svapp.hxx> #include <basic/sbx.hxx> #include <basic/sbstar.hxx> #include <basic/sbmod.hxx> #include <basic/sbmeth.hxx> using namespace ::com::sun::star; using namespace ::com::sun::star::lang; using namespace ::com::sun::star::uno; using namespace ::com::sun::star::script; //......................................................................... namespace basprov { //......................................................................... // ============================================================================= // BasicModuleNodeImpl // ============================================================================= BasicModuleNodeImpl::BasicModuleNodeImpl( const Reference< XComponentContext >& rxContext, const ::rtl::OUString& sScriptingContext, SbModule* pModule, bool isAppScript ) :m_xContext( rxContext ) ,m_sScriptingContext( sScriptingContext ) ,m_pModule( pModule ) ,m_bIsAppScript( isAppScript ) { } // ----------------------------------------------------------------------------- BasicModuleNodeImpl::~BasicModuleNodeImpl() { } // ----------------------------------------------------------------------------- // XBrowseNode // ----------------------------------------------------------------------------- ::rtl::OUString BasicModuleNodeImpl::getName( ) throw (RuntimeException) { ::vos::OGuard aGuard( Application::GetSolarMutex() ); ::rtl::OUString sModuleName; if ( m_pModule ) sModuleName = m_pModule->GetName(); return sModuleName; } // ----------------------------------------------------------------------------- Sequence< Reference< browse::XBrowseNode > > BasicModuleNodeImpl::getChildNodes( ) throw (RuntimeException) { ::vos::OGuard aGuard( Application::GetSolarMutex() ); Sequence< Reference< browse::XBrowseNode > > aChildNodes; if ( m_pModule ) { SbxArray* pMethods = m_pModule->GetMethods(); if ( pMethods ) { sal_Int32 nCount = pMethods->Count(); sal_Int32 nRealCount = 0; for ( sal_Int32 i = 0; i < nCount; ++i ) { SbMethod* pMethod = static_cast< SbMethod* >( pMethods->Get( static_cast< sal_uInt16 >( i ) ) ); if ( pMethod && !pMethod->IsHidden() ) ++nRealCount; } aChildNodes.realloc( nRealCount ); Reference< browse::XBrowseNode >* pChildNodes = aChildNodes.getArray(); sal_Int32 iTarget = 0; for ( sal_Int32 i = 0; i < nCount; ++i ) { SbMethod* pMethod = static_cast< SbMethod* >( pMethods->Get( static_cast< sal_uInt16 >( i ) ) ); if ( 
pMethod && !pMethod->IsHidden() ) pChildNodes[iTarget++] = static_cast< browse::XBrowseNode* >( new BasicMethodNodeImpl( m_xContext, m_sScriptingContext, pMethod, m_bIsAppScript ) ); } } } return aChildNodes; } // ----------------------------------------------------------------------------- sal_Bool BasicModuleNodeImpl::hasChildNodes( ) throw (RuntimeException) { ::vos::OGuard aGuard( Application::GetSolarMutex() ); sal_Bool bReturn = sal_False; if ( m_pModule ) { SbxArray* pMethods = m_pModule->GetMethods(); if ( pMethods && pMethods->Count() > 0 ) bReturn = sal_True; } return bReturn; } // ----------------------------------------------------------------------------- sal_Int16 BasicModuleNodeImpl::getType( ) throw (RuntimeException) { ::vos::OGuard aGuard( Application::GetSolarMutex() ); return browse::BrowseNodeTypes::CONTAINER; } // ----------------------------------------------------------------------------- //......................................................................... } // namespace basprov //.........................................................................
1,821
4,879
typedef NS_ENUM(NSInteger, MWMCircularProgressState) { MWMCircularProgressStateNormal, MWMCircularProgressStateSelected, MWMCircularProgressStateProgress, MWMCircularProgressStateSpinner, MWMCircularProgressStateFailed, MWMCircularProgressStateCompleted };
84
1,101
""" Quick script to output JS code for including the listed icons in base64 Usage: python base64_conv.py <filenames> """ import os import argparse import json def compute_base64_encoding(filename): base, suffix = os.path.splitext(filename) suffix = suffix.lstrip('.') with open(filename, 'rb') as f: data = f.read().encode("base64") return (base, "data:image/{0};base64,{1}".format(suffix, data)) def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("files", nargs='*', type=str) args = parser.parse_args() encodings = dict([compute_base64_encoding(filename) for filename in args.files]) print json.dumps(dict(encodings)) if __name__ == "__main__": main()
310
1,068
<gh_stars>1000+ package org.assertj.android.api.app; import android.annotation.TargetApi; import android.app.ListFragment; import static android.os.Build.VERSION_CODES.HONEYCOMB; import static org.assertj.core.api.Assertions.assertThat; /** Assertions for {@link ListFragment} instances. */ @TargetApi(HONEYCOMB) public class ListFragmentAssert extends AbstractFragmentAssert<ListFragmentAssert, ListFragment> { public ListFragmentAssert(ListFragment actual) { super(actual, ListFragmentAssert.class); } public ListFragmentAssert hasSelectedItemId(long id) { isNotNull(); long actualId = actual.getSelectedItemId(); assertThat(actualId) // .overridingErrorMessage("Expected selected ID <%s> but was <%s>.", id, actualId) // .isEqualTo(id); return this; } public ListFragmentAssert hasSelectedItemPosition(int position) { isNotNull(); long actualPosition = actual.getSelectedItemPosition(); assertThat(actualPosition) // .overridingErrorMessage("Expected selected position <%s> but was <%s>.", position, actualPosition) // .isEqualTo(position); return this; } }
415
1,144
<reponame>dram/metasfresh package de.metas.marketing.gateway.cleverreach.restapi.models; import static org.assertj.core.api.Assertions.assertThat; import java.io.IOException; import org.junit.jupiter.api.Test; import com.fasterxml.jackson.databind.ObjectMapper; /* * #%L * marketing-cleverreach * %% * Copyright (C) 2020 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ class ReceiverTest { @Test void serialize_deserialize() throws IOException { final Receiver receiver = Receiver.builder().activated(100L).active(true) .bounced(1) .deactivated(90L) .email("email") .groups_id(20) .id(30) .imported(80) .last_client("last_client") .last_ip("last_ip") .last_location("last_location") .attribute("attributeKey1", "attributeValue") .attribute("attributeKey2", "attributeValue") .global_attribute("global_attributeKey1", "global_attributeValue") .global_attribute("global_attributeKey2", "global_attributeValue") .build(); final ObjectMapper objectMapper = new ObjectMapper(); final String jsonString = objectMapper.writeValueAsString(receiver); final Receiver receiverFromJson = objectMapper.readValue(jsonString, Receiver.class); assertThat(receiverFromJson).isEqualTo(receiver); } }
632
333
package com.alipay.api.domain; import java.util.List; import com.alipay.api.AlipayObject; import com.alipay.api.internal.mapping.ApiField; import com.alipay.api.internal.mapping.ApiListField; /** * Extended parameters for cross-border marketing online e-commerce goods * * @author auto create * @since 1.0, 2021-08-27 11:06:43 */ public class GolGoodsExtParam extends AlipayObject { private static final long serialVersionUID = 3245522581964277735L; /** * Brand name */ @ApiField("brand_name") private String brandName; /** * Product description copy */ @ApiField("description") private String description; /** * Whether tax is included. Y - tax included N - tax not included */ @ApiField("is_tax_free") private String isTaxFree; /** * Specifications */ @ApiListField("specifications") @ApiField("string") private List<String> specifications; /** * Stock status. Sufficient stock: STOCK_SUFFICIENT Low stock: STOCK_INSUFFICIENT Out of stock: OUT_OF_STOCK */ @ApiField("stock_status") private String stockStatus; /** * Tax rate percentage number, passed as a string in percentage form. Required for goods that do not include tax. Precision: 2 decimal places. */ @ApiField("tax_rate") private String taxRate; public String getBrandName() { return this.brandName; } public void setBrandName(String brandName) { this.brandName = brandName; } public String getDescription() { return this.description; } public void setDescription(String description) { this.description = description; } public String getIsTaxFree() { return this.isTaxFree; } public void setIsTaxFree(String isTaxFree) { this.isTaxFree = isTaxFree; } public List<String> getSpecifications() { return this.specifications; } public void setSpecifications(List<String> specifications) { this.specifications = specifications; } public String getStockStatus() { return this.stockStatus; } public void setStockStatus(String stockStatus) { this.stockStatus = stockStatus; } public String getTaxRate() { return this.taxRate; } public void setTaxRate(String taxRate) { this.taxRate = taxRate; } }
969
473
<gh_stars>100-1000 #ifndef Py_ABSTRACTOBJECT_H #define Py_ABSTRACTOBJECT_H #ifdef __cplusplus extern "C" { #endif #ifdef PY_SSIZE_T_CLEAN #define PyObject_CallFunction _PyObject_CallFunction_SizeT #define PyObject_CallMethod _PyObject_CallMethod_SizeT #endif /* Abstract Object Interface (many thanks to <NAME>) */ /* PROPOSAL: A Generic Python Object Interface for Python C Modules Problem Python modules written in C that must access Python objects must do so through routines whose interfaces are described by a set of include files. Unfortunately, these routines vary according to the object accessed. To use these routines, the C programmer must check the type of the object being used and must call a routine based on the object type. For example, to access an element of a sequence, the programmer must determine whether the sequence is a list or a tuple: if(is_tupleobject(o)) e=gettupleitem(o,i) else if(is_listitem(o)) e=getlistitem(o,i) If the programmer wants to get an item from another type of object that provides sequence behavior, there is no clear way to do it correctly. The persistent programmer may peruse object.h and find that the _typeobject structure provides a means of invoking up to (currently about) 41 special operators. So, for example, a routine can get an item from any object that provides sequence behavior. However, to use this mechanism, the programmer must make their code dependent on the current Python implementation. Also, certain semantics, especially memory management semantics, may differ by the type of object being used. Unfortunately, these semantics are not clearly described in the current include files. An abstract interface providing more consistent semantics is needed. Proposal I propose the creation of a standard interface (with an associated library of routines and/or macros) for generically obtaining the services of Python objects. This proposal can be viewed as one components of a Python C interface consisting of several components. From the viewpoint of C access to Python services, we have (as suggested by Guido in off-line discussions): - "Very high level layer": two or three functions that let you exec or eval arbitrary Python code given as a string in a module whose name is given, passing C values in and getting C values out using mkvalue/getargs style format strings. This does not require the user to declare any variables of type "PyObject *". This should be enough to write a simple application that gets Python code from the user, execs it, and returns the output or errors. (Error handling must also be part of this API.) - "Abstract objects layer": which is the subject of this proposal. It has many functions operating on objects, and lest you do many things from C that you can also write in Python, without going through the Python parser. - "Concrete objects layer": This is the public type-dependent interface provided by the standard built-in types, such as floats, strings, and lists. This interface exists and is currently documented by the collection of include files provided with the Python distributions. From the point of view of Python accessing services provided by C modules: - "Python module interface": this interface consist of the basic routines used to define modules and their members. Most of the current extensions-writing guide deals with this interface. - "Built-in object interface": this is the interface that a new built-in type must provide and the mechanisms and rules that a developer of a new built-in type must use and follow. 
This proposal is a "first-cut" that is intended to spur discussion. See especially the lists of notes. The Python C object interface will provide four protocols: object, numeric, sequence, and mapping. Each protocol consists of a collection of related operations. If an operation that is not provided by a particular type is invoked, then a standard exception, NotImplementedError is raised with an operation name as an argument. In addition, for convenience this interface defines a set of constructors for building objects of built-in types. This is needed so new objects can be returned from C functions that otherwise treat objects generically. Memory Management For all of the functions described in this proposal, if a function retains a reference to a Python object passed as an argument, then the function will increase the reference count of the object. It is unnecessary for the caller to increase the reference count of an argument in anticipation of the object's retention. All Python objects returned from functions should be treated as new objects. Functions that return objects assume that the caller will retain a reference and the reference count of the object has already been incremented to account for this fact. A caller that does not retain a reference to an object that is returned from a function must decrement the reference count of the object (using DECREF(object)) to prevent memory leaks. Note that the behavior mentioned here is different from the current behavior for some objects (e.g. lists and tuples) when certain type-specific routines are called directly (e.g. setlistitem). The proposed abstraction layer will provide a consistent memory management interface, correcting for inconsistent behavior for some built-in types. Protocols xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/ /* Object Protocol: */ /* Implemented elsewhere: int PyObject_Print(PyObject *o, FILE *fp, int flags); Print an object, o, on file, fp. Returns -1 on error. The flags argument is used to enable certain printing options. The only option currently supported is Py_Print_RAW. (What should be said about Py_Print_RAW?) */ /* Implemented elsewhere: int PyObject_HasAttrString(PyObject *o, char *attr_name); Returns 1 if o has the attribute attr_name, and 0 otherwise. This is equivalent to the Python expression: hasattr(o,attr_name). This function always succeeds. */ /* Implemented elsewhere: PyObject* PyObject_GetAttrString(PyObject *o, char *attr_name); Retrieve an attributed named attr_name form object o. Returns the attribute value on success, or NULL on failure. This is the equivalent of the Python expression: o.attr_name. */ /* Implemented elsewhere: int PyObject_HasAttr(PyObject *o, PyObject *attr_name); Returns 1 if o has the attribute attr_name, and 0 otherwise. This is equivalent to the Python expression: hasattr(o,attr_name). This function always succeeds. */ /* Implemented elsewhere: PyObject* PyObject_GetAttr(PyObject *o, PyObject *attr_name); Retrieve an attributed named attr_name form object o. Returns the attribute value on success, or NULL on failure. This is the equivalent of the Python expression: o.attr_name. */ /* Implemented elsewhere: int PyObject_SetAttrString(PyObject *o, char *attr_name, PyObject *v); Set the value of the attribute named attr_name, for object o, to the value v. Raise an exception and return -1 on failure; return 0 on success. This is the equivalent of the Python statement o.attr_name=v. 
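   As an illustrative sketch only (added here, not part of the original
   header), the attribute calls combine with the memory-management rules
   above roughly as follows; the object o and the attribute name "count"
   are assumptions of the example:

       PyObject *v = PyInt_FromLong(3);
       if (v != NULL && PyObject_SetAttrString(o, "count", v) == 0) {
           PyObject *back = PyObject_GetAttrString(o, "count");
           Py_XDECREF(back);
       }
       Py_XDECREF(v);

   PyObject_GetAttrString returns a new reference that the caller must
   release, and PyObject_SetAttrString does not steal the reference to v,
   hence the two Py_XDECREF calls.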
*/ /* Implemented elsewhere: int PyObject_SetAttr(PyObject *o, PyObject *attr_name, PyObject *v); Set the value of the attribute named attr_name, for object o, to the value v. Raise an exception and return -1 on failure; return 0 on success. This is the equivalent of the Python statement o.attr_name=v. */ /* implemented as a macro: int PyObject_DelAttrString(PyObject *o, char *attr_name); Delete attribute named attr_name, for object o. Returns -1 on failure. This is the equivalent of the Python statement: del o.attr_name. */ #define PyObject_DelAttrString(O,A) PyObject_SetAttrString((O),(A),NULL) /* implemented as a macro: int PyObject_DelAttr(PyObject *o, PyObject *attr_name); Delete attribute named attr_name, for object o. Returns -1 on failure. This is the equivalent of the Python statement: del o.attr_name. */ #define PyObject_DelAttr(O,A) PyObject_SetAttr((O),(A),NULL) PyAPI_FUNC(int) PyObject_Cmp(PyObject *o1, PyObject *o2, int *result); /* Compare the values of o1 and o2 using a routine provided by o1, if one exists, otherwise with a routine provided by o2. The result of the comparison is returned in result. Returns -1 on failure. This is the equivalent of the Python statement: result=cmp(o1,o2). */ /* Implemented elsewhere: int PyObject_Compare(PyObject *o1, PyObject *o2); Compare the values of o1 and o2 using a routine provided by o1, if one exists, otherwise with a routine provided by o2. Returns the result of the comparison on success. On error, the value returned is undefined. This is equivalent to the Python expression: cmp(o1,o2). */ /* Implemented elsewhere: PyObject *PyObject_Repr(PyObject *o); Compute the string representation of object, o. Returns the string representation on success, NULL on failure. This is the equivalent of the Python expression: repr(o). Called by the repr() built-in function and by reverse quotes. */ /* Implemented elsewhere: PyObject *PyObject_Str(PyObject *o); Compute the string representation of object, o. Returns the string representation on success, NULL on failure. This is the equivalent of the Python expression: str(o).) Called by the str() built-in function and by the print statement. */ /* Implemented elsewhere: PyObject *PyObject_Unicode(PyObject *o); Compute the unicode representation of object, o. Returns the unicode representation on success, NULL on failure. This is the equivalent of the Python expression: unistr(o).) Called by the unistr() built-in function. */ /* Declared elsewhere PyAPI_FUNC(int) PyCallable_Check(PyObject *o); Determine if the object, o, is callable. Return 1 if the object is callable and 0 otherwise. This function always succeeds. */ PyAPI_FUNC(PyObject *) PyObject_Call(PyObject *callable_object, PyObject *args, PyObject *kw); /* Call a callable Python object, callable_object, with arguments and keywords arguments. The 'args' argument can not be NULL, but the 'kw' argument can be NULL. */ PyAPI_FUNC(PyObject *) PyObject_CallObject(PyObject *callable_object, PyObject *args); /* Call a callable Python object, callable_object, with arguments given by the tuple, args. If no arguments are needed, then args may be NULL. Returns the result of the call on success, or NULL on failure. This is the equivalent of the Python expression: apply(o,args). */ PyAPI_FUNC(PyObject *) PyObject_CallFunction(PyObject *callable_object, char *format, ...); /* Call a callable Python object, callable_object, with a variable number of C arguments. The C arguments are described using a mkvalue-style format string. 
The format may be NULL, indicating that no arguments are provided. Returns the result of the call on success, or NULL on failure. This is the equivalent of the Python expression: apply(o,args). */ PyAPI_FUNC(PyObject *) PyObject_CallMethod(PyObject *o, char *m, char *format, ...); /* Call the method named m of object o with a variable number of C arguments. The C arguments are described by a mkvalue format string. The format may be NULL, indicating that no arguments are provided. Returns the result of the call on success, or NULL on failure. This is the equivalent of the Python expression: o.method(args). */ PyAPI_FUNC(PyObject *) _PyObject_CallFunction_SizeT(PyObject *callable, char *format, ...); PyAPI_FUNC(PyObject *) _PyObject_CallMethod_SizeT(PyObject *o, char *name, char *format, ...); PyAPI_FUNC(PyObject *) PyObject_CallFunctionObjArgs(PyObject *callable, ...); /* Call a callable Python object, callable_object, with a variable number of C arguments. The C arguments are provided as PyObject * values, terminated by a NULL. Returns the result of the call on success, or NULL on failure. This is the equivalent of the Python expression: apply(o,args). */ PyAPI_FUNC(PyObject *) PyObject_CallMethodObjArgs(PyObject *o, PyObject *m, ...); /* Call the method named m of object o with a variable number of C arguments. The C arguments are provided as PyObject * values, terminated by NULL. Returns the result of the call on success, or NULL on failure. This is the equivalent of the Python expression: o.method(args). */ /* Implemented elsewhere: long PyObject_Hash(PyObject *o); Compute and return the hash, hash_value, of an object, o. On failure, return -1. This is the equivalent of the Python expression: hash(o). */ /* Implemented elsewhere: int PyObject_IsTrue(PyObject *o); Returns 1 if the object, o, is considered to be true, 0 if o is considered to be false and -1 on failure. This is equivalent to the Python expression: not not o */ /* Implemented elsewhere: int PyObject_Not(PyObject *o); Returns 0 if the object, o, is considered to be true, 1 if o is considered to be false and -1 on failure. This is equivalent to the Python expression: not o */ PyAPI_FUNC(PyObject *) PyObject_Type(PyObject *o); /* On success, returns a type object corresponding to the object type of object o. On failure, returns NULL. This is equivalent to the Python expression: type(o). */ PyAPI_FUNC(Py_ssize_t) PyObject_Size(PyObject *o); /* Return the size of object o. If the object, o, provides both sequence and mapping protocols, the sequence size is returned. On error, -1 is returned. This is the equivalent to the Python expression: len(o). */ /* For DLL compatibility */ #undef PyObject_Length PyAPI_FUNC(Py_ssize_t) PyObject_Length(PyObject *o); #define PyObject_Length PyObject_Size PyAPI_FUNC(Py_ssize_t) _PyObject_LengthHint(PyObject *o, Py_ssize_t); /* Guess the size of object o using len(o) or o.__length_hint__(). If neither of those return a non-negative value, then return the default value. If one of the calls fails, this function returns -1. */ PyAPI_FUNC(PyObject *) PyObject_GetItem(PyObject *o, PyObject *key); /* Return element of o corresponding to the object, key, or NULL on failure. This is the equivalent of the Python expression: o[key]. */ PyAPI_FUNC(int) PyObject_SetItem(PyObject *o, PyObject *key, PyObject *v); /* Map the object key to the value v. Raise an exception and return -1 on failure; return 0 on success. This is the equivalent of the Python statement o[key]=v. 
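   A hedged sketch (added for illustration, not in the original header) of
   the item protocol from C; the mapping object o and the key string
   "answer" are assumptions of the example:

       PyObject *key = PyString_FromString("answer");
       PyObject *val = PyInt_FromLong(42);
       if (key != NULL && val != NULL &&
               PyObject_SetItem(o, key, val) == 0) {
           PyObject *back = PyObject_GetItem(o, key);
           Py_XDECREF(back);
       }
       Py_XDECREF(key);
       Py_XDECREF(val);

   As with the attribute functions, PyObject_GetItem returns a new
   reference and PyObject_SetItem leaves ownership of key and val with the
   caller.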
*/ PyAPI_FUNC(int) PyObject_DelItemString(PyObject *o, char *key); /* Remove the mapping for object, key, from the object *o. Returns -1 on failure. This is equivalent to the Python statement: del o[key]. */ PyAPI_FUNC(int) PyObject_DelItem(PyObject *o, PyObject *key); /* Delete the mapping for key from *o. Returns -1 on failure. This is the equivalent of the Python statement: del o[key]. */ PyAPI_FUNC(int) PyObject_AsCharBuffer(PyObject *obj, const char **buffer, Py_ssize_t *buffer_len); /* Takes an arbitrary object which must support the (character, single segment) buffer interface and returns a pointer to a read-only memory location useable as character based input for subsequent processing. 0 is returned on success. buffer and buffer_len are only set in case no error occurs. Otherwise, -1 is returned and an exception set. */ PyAPI_FUNC(int) PyObject_CheckReadBuffer(PyObject *obj); /* Checks whether an arbitrary object supports the (character, single segment) buffer interface. Returns 1 on success, 0 on failure. */ PyAPI_FUNC(int) PyObject_AsReadBuffer(PyObject *obj, const void **buffer, Py_ssize_t *buffer_len); /* Same as PyObject_AsCharBuffer() except that this API expects (readable, single segment) buffer interface and returns a pointer to a read-only memory location which can contain arbitrary data. 0 is returned on success. buffer and buffer_len are only set in case no error occurs. Otherwise, -1 is returned and an exception set. */ PyAPI_FUNC(int) PyObject_AsWriteBuffer(PyObject *obj, void **buffer, Py_ssize_t *buffer_len); /* Takes an arbitrary object which must support the (writeable, single segment) buffer interface and returns a pointer to a writeable memory location in buffer of size buffer_len. 0 is returned on success. buffer and buffer_len are only set in case no error occurs. Otherwise, -1 is returned and an exception set. */ /* new buffer API */ #define PyObject_CheckBuffer(obj) \ (((obj)->ob_type->tp_as_buffer != NULL) && \ (PyType_HasFeature((obj)->ob_type, Py_TPFLAGS_HAVE_NEWBUFFER)) && \ ((obj)->ob_type->tp_as_buffer->bf_getbuffer != NULL)) /* Return 1 if the getbuffer function is available, otherwise return 0 */ PyAPI_FUNC(int) PyObject_GetBuffer(PyObject *obj, Py_buffer *view, int flags); /* This is a C-API version of the getbuffer function call. It checks to make sure object has the required function pointer and issues the call. Returns -1 and raises an error on failure and returns 0 on success */ PyAPI_FUNC(void *) PyBuffer_GetPointer(Py_buffer *view, Py_ssize_t *indices); /* Get the memory area pointed to by the indices for the buffer given. Note that view->ndim is the assumed size of indices */ PyAPI_FUNC(int) PyBuffer_SizeFromFormat(const char *); /* Return the implied itemsize of the data-format area from a struct-style description */ PyAPI_FUNC(int) PyBuffer_ToContiguous(void *buf, Py_buffer *view, Py_ssize_t len, char fort); PyAPI_FUNC(int) PyBuffer_FromContiguous(Py_buffer *view, void *buf, Py_ssize_t len, char fort); /* Copy len bytes of data from the contiguous chunk of memory pointed to by buf into the buffer exported by obj. Return 0 on success and return -1 and raise a PyBuffer_Error on error (i.e. the object does not have a buffer interface or it is not working). If fort is 'F' and the object is multi-dimensional, then the data will be copied into the array in Fortran-style (first dimension varies the fastest). If fort is 'C', then the data will be copied into the array in C-style (last dimension varies the fastest). 
If fort is 'A', then it does not matter and the copy will be made in whatever way is more efficient. */ PyAPI_FUNC(int) PyObject_CopyData(PyObject *dest, PyObject *src); /* Copy the data from the src buffer to the buffer of destination */ PyAPI_FUNC(int) PyBuffer_IsContiguous(Py_buffer *view, char fort); PyAPI_FUNC(void) PyBuffer_FillContiguousStrides(int ndims, Py_ssize_t *shape, Py_ssize_t *strides, int itemsize, char fort); /* Fill the strides array with byte-strides of a contiguous (Fortran-style if fort is 'F' or C-style otherwise) array of the given shape with the given number of bytes per element. */ PyAPI_FUNC(int) PyBuffer_FillInfo(Py_buffer *view, PyObject *o, void *buf, Py_ssize_t len, int readonly, int flags); /* Fills in a buffer-info structure correctly for an exporter that can only share a contiguous chunk of memory of "unsigned bytes" of the given length. Returns 0 on success and -1 (with raising an error) on error. */ PyAPI_FUNC(void) PyBuffer_Release(Py_buffer *view); /* Releases a Py_buffer obtained from getbuffer ParseTuple's s*. */ PyAPI_FUNC(PyObject *) PyObject_Format(PyObject* obj, PyObject *format_spec); /* Takes an arbitrary object and returns the result of calling obj.__format__(format_spec). */ /* Iterators */ PyAPI_FUNC(PyObject *) PyObject_GetIter(PyObject *); /* Takes an object and returns an iterator for it. This is typically a new iterator but if the argument is an iterator, this returns itself. */ #define PyIter_Check(obj) \ (PyType_HasFeature((obj)->ob_type, Py_TPFLAGS_HAVE_ITER) && \ (obj)->ob_type->tp_iternext != NULL && \ (obj)->ob_type->tp_iternext != &_PyObject_NextNotImplemented) PyAPI_FUNC(PyObject *) PyIter_Next(PyObject *); /* Takes an iterator object and calls its tp_iternext slot, returning the next value. If the iterator is exhausted, this returns NULL without setting an exception. NULL with an exception means an error occurred. */ /* Number Protocol:*/ PyAPI_FUNC(int) PyNumber_Check(PyObject *o); /* Returns 1 if the object, o, provides numeric protocols, and false otherwise. This function always succeeds. */ PyAPI_FUNC(PyObject *) PyNumber_Add(PyObject *o1, PyObject *o2); /* Returns the result of adding o1 and o2, or null on failure. This is the equivalent of the Python expression: o1+o2. */ PyAPI_FUNC(PyObject *) PyNumber_Subtract(PyObject *o1, PyObject *o2); /* Returns the result of subtracting o2 from o1, or null on failure. This is the equivalent of the Python expression: o1-o2. */ PyAPI_FUNC(PyObject *) PyNumber_Multiply(PyObject *o1, PyObject *o2); /* Returns the result of multiplying o1 and o2, or null on failure. This is the equivalent of the Python expression: o1*o2. */ PyAPI_FUNC(PyObject *) PyNumber_MatrixMultiply(PyObject *o1, PyObject *o2); /* This is the equivalent of the Python expression: o1 @ o2. */ PyAPI_FUNC(PyObject *) PyNumber_Divide(PyObject *o1, PyObject *o2); /* Returns the result of dividing o1 by o2, or null on failure. This is the equivalent of the Python expression: o1/o2. */ PyAPI_FUNC(PyObject *) PyNumber_FloorDivide(PyObject *o1, PyObject *o2); /* Returns the result of dividing o1 by o2 giving an integral result, or null on failure. This is the equivalent of the Python expression: o1//o2. */ PyAPI_FUNC(PyObject *) PyNumber_TrueDivide(PyObject *o1, PyObject *o2); /* Returns the result of dividing o1 by o2 giving a float result, or null on failure. This is the equivalent of the Python expression: o1/o2. 
*/ PyAPI_FUNC(PyObject *) PyNumber_Remainder(PyObject *o1, PyObject *o2); /* Returns the remainder of dividing o1 by o2, or null on failure. This is the equivalent of the Python expression: o1%o2. */ PyAPI_FUNC(PyObject *) PyNumber_Divmod(PyObject *o1, PyObject *o2); /* See the built-in function divmod. Returns NULL on failure. This is the equivalent of the Python expression: divmod(o1,o2). */ PyAPI_FUNC(PyObject *) PyNumber_Power(PyObject *o1, PyObject *o2, PyObject *o3); /* See the built-in function pow. Returns NULL on failure. This is the equivalent of the Python expression: pow(o1,o2,o3), where o3 is optional. */ PyAPI_FUNC(PyObject *) PyNumber_Negative(PyObject *o); /* Returns the negation of o on success, or null on failure. This is the equivalent of the Python expression: -o. */ PyAPI_FUNC(PyObject *) PyNumber_Positive(PyObject *o); /* Returns the (what?) of o on success, or NULL on failure. This is the equivalent of the Python expression: +o. */ PyAPI_FUNC(PyObject *) PyNumber_Absolute(PyObject *o); /* Returns the absolute value of o, or null on failure. This is the equivalent of the Python expression: abs(o). */ PyAPI_FUNC(PyObject *) PyNumber_Invert(PyObject *o); /* Returns the bitwise negation of o on success, or NULL on failure. This is the equivalent of the Python expression: ~o. */ PyAPI_FUNC(PyObject *) PyNumber_Lshift(PyObject *o1, PyObject *o2); /* Returns the result of left shifting o1 by o2 on success, or NULL on failure. This is the equivalent of the Python expression: o1 << o2. */ PyAPI_FUNC(PyObject *) PyNumber_Rshift(PyObject *o1, PyObject *o2); /* Returns the result of right shifting o1 by o2 on success, or NULL on failure. This is the equivalent of the Python expression: o1 >> o2. */ PyAPI_FUNC(PyObject *) PyNumber_And(PyObject *o1, PyObject *o2); /* Returns the result of bitwise and of o1 and o2 on success, or NULL on failure. This is the equivalent of the Python expression: o1&o2. */ PyAPI_FUNC(PyObject *) PyNumber_Xor(PyObject *o1, PyObject *o2); /* Returns the bitwise exclusive or of o1 by o2 on success, or NULL on failure. This is the equivalent of the Python expression: o1^o2. */ PyAPI_FUNC(PyObject *) PyNumber_Or(PyObject *o1, PyObject *o2); /* Returns the result of bitwise or on o1 and o2 on success, or NULL on failure. This is the equivalent of the Python expression: o1|o2. */ /* Implemented elsewhere: int PyNumber_Coerce(PyObject **p1, PyObject **p2); This function takes the addresses of two variables of type PyObject*. If the objects pointed to by *p1 and *p2 have the same type, increment their reference count and return 0 (success). If the objects can be converted to a common numeric type, replace *p1 and *p2 by their converted value (with 'new' reference counts), and return 0. If no conversion is possible, or if some other error occurs, return -1 (failure) and don't increment the reference counts. The call PyNumber_Coerce(&o1, &o2) is equivalent to the Python statement o1, o2 = coerce(o1, o2). */ #define PyIndex_Check(obj) \ ((obj)->ob_type->tp_as_number != NULL && \ PyType_HasFeature((obj)->ob_type, Py_TPFLAGS_HAVE_INDEX) && \ (obj)->ob_type->tp_as_number->nb_index != NULL) PyAPI_FUNC(PyObject *) PyNumber_Index(PyObject *o); /* Returns the object converted to a Python long or int or NULL with an error raised on failure. */ PyAPI_FUNC(Py_ssize_t) PyNumber_AsSsize_t(PyObject *o, PyObject *exc); /* Returns the Integral instance converted to an int. The instance is expected to be int or long or have an __int__ method. Steals integral's reference. 
error_format will be used to create the TypeError if integral isn't actually an Integral instance. error_format should be a format string that can accept a char* naming integral's type. */ PyAPI_FUNC(PyObject *) _PyNumber_ConvertIntegralToInt( PyObject *integral, const char* error_format); /* Returns the object converted to Py_ssize_t by going through PyNumber_Index first. If an overflow error occurs while converting the int-or-long to Py_ssize_t, then the second argument is the error-type to return. If it is NULL, then the overflow error is cleared and the value is clipped. */ PyAPI_FUNC(PyObject *) PyNumber_Int(PyObject *o); /* Returns the o converted to an integer object on success, or NULL on failure. This is the equivalent of the Python expression: int(o). */ PyAPI_FUNC(PyObject *) PyNumber_Long(PyObject *o); /* Returns the o converted to a long integer object on success, or NULL on failure. This is the equivalent of the Python expression: long(o). */ PyAPI_FUNC(PyObject *) PyNumber_Float(PyObject *o); /* Returns the o converted to a float object on success, or NULL on failure. This is the equivalent of the Python expression: float(o). */ /* In-place variants of (some of) the above number protocol functions */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceAdd(PyObject *o1, PyObject *o2); /* Returns the result of adding o2 to o1, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 += o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceSubtract(PyObject *o1, PyObject *o2); /* Returns the result of subtracting o2 from o1, possibly in-place or null on failure. This is the equivalent of the Python expression: o1 -= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceMultiply(PyObject *o1, PyObject *o2); /* Returns the result of multiplying o1 by o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 *= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceMatrixMultiply(PyObject *o1, PyObject *o2); /* This is the equivalent of the Python expression: o1 @= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceDivide(PyObject *o1, PyObject *o2); /* Returns the result of dividing o1 by o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 /= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceFloorDivide(PyObject *o1, PyObject *o2); /* Returns the result of dividing o1 by o2 giving an integral result, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 /= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceTrueDivide(PyObject *o1, PyObject *o2); /* Returns the result of dividing o1 by o2 giving a float result, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 /= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceRemainder(PyObject *o1, PyObject *o2); /* Returns the remainder of dividing o1 by o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 %= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlacePower(PyObject *o1, PyObject *o2, PyObject *o3); /* Returns the result of raising o1 to the power of o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 **= o2, or pow(o1, o2, o3) if o3 is present. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceLshift(PyObject *o1, PyObject *o2); /* Returns the result of left shifting o1 by o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 <<= o2. 
*/ PyAPI_FUNC(PyObject *) PyNumber_InPlaceRshift(PyObject *o1, PyObject *o2); /* Returns the result of right shifting o1 by o2, possibly in-place or null on failure. This is the equivalent of the Python expression: o1 >>= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceAnd(PyObject *o1, PyObject *o2); /* Returns the result of bitwise and of o1 and o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 &= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceXor(PyObject *o1, PyObject *o2); /* Returns the bitwise exclusive or of o1 by o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 ^= o2. */ PyAPI_FUNC(PyObject *) PyNumber_InPlaceOr(PyObject *o1, PyObject *o2); /* Returns the result of bitwise or of o1 and o2, possibly in-place, or null on failure. This is the equivalent of the Python expression: o1 |= o2. */ PyAPI_FUNC(PyObject *) PyNumber_ToBase(PyObject *n, int base); /* Returns the integer n converted to a string with a base, with a base marker of 0b, 0o or 0x prefixed if applicable. If n is not an int object, it is converted with PyNumber_Index first. */ /* Sequence protocol:*/ PyAPI_FUNC(int) PySequence_Check(PyObject *o); /* Return 1 if the object provides sequence protocol, and zero otherwise. This function always succeeds. */ PyAPI_FUNC(Py_ssize_t) PySequence_Size(PyObject *o); /* Return the size of sequence object o, or -1 on failure. */ /* For DLL compatibility */ #undef PySequence_Length PyAPI_FUNC(Py_ssize_t) PySequence_Length(PyObject *o); #define PySequence_Length PySequence_Size PyAPI_FUNC(PyObject *) PySequence_Concat(PyObject *o1, PyObject *o2); /* Return the concatenation of o1 and o2 on success, and NULL on failure. This is the equivalent of the Python expression: o1+o2. */ PyAPI_FUNC(PyObject *) PySequence_Repeat(PyObject *o, Py_ssize_t count); /* Return the result of repeating sequence object o count times, or NULL on failure. This is the equivalent of the Python expression: o1*count. */ PyAPI_FUNC(PyObject *) PySequence_GetItem(PyObject *o, Py_ssize_t i); /* Return the ith element of o, or NULL on failure. This is the equivalent of the Python expression: o[i]. */ PyAPI_FUNC(PyObject *) PySequence_GetSlice(PyObject *o, Py_ssize_t i1, Py_ssize_t i2); /* Return the slice of sequence object o between i1 and i2, or NULL on failure. This is the equivalent of the Python expression: o[i1:i2]. */ PyAPI_FUNC(int) PySequence_SetItem(PyObject *o, Py_ssize_t i, PyObject *v); /* Assign object v to the ith element of o. Raise an exception and return -1 on failure; return 0 on success. This is the equivalent of the Python statement o[i]=v. */ PyAPI_FUNC(int) PySequence_DelItem(PyObject *o, Py_ssize_t i); /* Delete the ith element of object v. Returns -1 on failure. This is the equivalent of the Python statement: del o[i]. */ PyAPI_FUNC(int) PySequence_SetSlice(PyObject *o, Py_ssize_t i1, Py_ssize_t i2, PyObject *v); /* Assign the sequence object, v, to the slice in sequence object, o, from i1 to i2. Raise an exception and return -1 on failure; return 0 on success. This is the equivalent of the Python statement: o[i1:i2]=v. */ PyAPI_FUNC(int) PySequence_DelSlice(PyObject *o, Py_ssize_t i1, Py_ssize_t i2); /* Delete the slice in sequence object, o, from i1 to i2. Returns -1 on failure. This is the equivalent of the Python statement: del o[i1:i2]. */ PyAPI_FUNC(PyObject *) PySequence_Tuple(PyObject *o); /* Returns the sequence, o, as a tuple on success, and NULL on failure. 
This is equivalent to the Python expression: tuple(o) */ PyAPI_FUNC(PyObject *) PySequence_List(PyObject *o); /* Returns the sequence, o, as a list on success, and NULL on failure. This is equivalent to the Python expression: list(o) */ PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m); /* Return the sequence, o, as a list, unless it's already a tuple or list. Use PySequence_Fast_GET_ITEM to access the members of this list, and PySequence_Fast_GET_SIZE to get its length. Returns NULL on failure. If the object does not support iteration, raises a TypeError exception with m as the message text. */ #define PySequence_Fast_GET_SIZE(o) \ (PyList_Check(o) ? PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o)) /* Return the size of o, assuming that o was returned by PySequence_Fast and is not NULL. */ #define PySequence_Fast_GET_ITEM(o, i)\ (PyList_Check(o) ? PyList_GET_ITEM(o, i) : PyTuple_GET_ITEM(o, i)) /* Return the ith element of o, assuming that o was returned by PySequence_Fast, and that i is within bounds. */ #define PySequence_ITEM(o, i)\ ( Py_TYPE(o)->tp_as_sequence->sq_item(o, i) ) /* Assume tp_as_sequence and sq_item exist and that i does not need to be corrected for a negative index */ #define PySequence_Fast_ITEMS(sf) \ (PyList_Check(sf) ? ((PyListObject *)(sf))->ob_item \ : ((PyTupleObject *)(sf))->ob_item) /* Return a pointer to the underlying item array for an object retured by PySequence_Fast */ PyAPI_FUNC(Py_ssize_t) PySequence_Count(PyObject *o, PyObject *value); /* Return the number of occurrences on value on o, that is, return the number of keys for which o[key]==value. On failure, return -1. This is equivalent to the Python expression: o.count(value). */ PyAPI_FUNC(int) PySequence_Contains(PyObject *seq, PyObject *ob); /* Return -1 if error; 1 if ob in seq; 0 if ob not in seq. Use __contains__ if possible, else _PySequence_IterSearch(). */ #define PY_ITERSEARCH_COUNT 1 #define PY_ITERSEARCH_INDEX 2 #define PY_ITERSEARCH_CONTAINS 3 PyAPI_FUNC(Py_ssize_t) _PySequence_IterSearch(PyObject *seq, PyObject *obj, int operation); /* Iterate over seq. Result depends on the operation: PY_ITERSEARCH_COUNT: return # of times obj appears in seq; -1 if error. PY_ITERSEARCH_INDEX: return 0-based index of first occurrence of obj in seq; set ValueError and return -1 if none found; also return -1 on error. PY_ITERSEARCH_CONTAINS: return 1 if obj in seq, else 0; -1 on error. */ /* For DLL-level backwards compatibility */ #undef PySequence_In PyAPI_FUNC(int) PySequence_In(PyObject *o, PyObject *value); /* For source-level backwards compatibility */ #define PySequence_In PySequence_Contains /* Determine if o contains value. If an item in o is equal to X, return 1, otherwise return 0. On error, return -1. This is equivalent to the Python expression: value in o. */ PyAPI_FUNC(Py_ssize_t) PySequence_Index(PyObject *o, PyObject *value); /* Return the first index for which o[i]=value. On error, return -1. This is equivalent to the Python expression: o.index(value). */ /* In-place versions of some of the above Sequence functions. */ PyAPI_FUNC(PyObject *) PySequence_InPlaceConcat(PyObject *o1, PyObject *o2); /* Append o2 to o1, in-place when possible. Return the resulting object, which could be o1, or NULL on failure. This is the equivalent of the Python expression: o1 += o2. */ PyAPI_FUNC(PyObject *) PySequence_InPlaceRepeat(PyObject *o, Py_ssize_t count); /* Repeat o1 by count, in-place when possible. Return the resulting object, which could be o1, or NULL on failure. 
This is the equivalent of the Python expression: o1 *= count. */ /* Mapping protocol:*/ PyAPI_FUNC(int) PyMapping_Check(PyObject *o); /* Return 1 if the object provides mapping protocol, and zero otherwise. This function always succeeds. */ PyAPI_FUNC(Py_ssize_t) PyMapping_Size(PyObject *o); /* Returns the number of keys in object o on success, and -1 on failure. For objects that do not provide sequence protocol, this is equivalent to the Python expression: len(o). */ /* For DLL compatibility */ #undef PyMapping_Length PyAPI_FUNC(Py_ssize_t) PyMapping_Length(PyObject *o); #define PyMapping_Length PyMapping_Size /* implemented as a macro: int PyMapping_DelItemString(PyObject *o, char *key); Remove the mapping for object, key, from the object *o. Returns -1 on failure. This is equivalent to the Python statement: del o[key]. */ #define PyMapping_DelItemString(O,K) PyObject_DelItemString((O),(K)) /* implemented as a macro: int PyMapping_DelItem(PyObject *o, PyObject *key); Remove the mapping for object, key, from the object *o. Returns -1 on failure. This is equivalent to the Python statement: del o[key]. */ #define PyMapping_DelItem(O,K) PyObject_DelItem((O),(K)) PyAPI_FUNC(int) PyMapping_HasKeyString(PyObject *o, char *key); /* On success, return 1 if the mapping object has the key, key, and 0 otherwise. This is equivalent to the Python expression: o.has_key(key). This function always succeeds. */ PyAPI_FUNC(int) PyMapping_HasKey(PyObject *o, PyObject *key); /* Return 1 if the mapping object has the key, key, and 0 otherwise. This is equivalent to the Python expression: o.has_key(key). This function always succeeds. */ /* Implemented as macro: PyObject *PyMapping_Keys(PyObject *o); On success, return a list of the keys in object o. On failure, return NULL. This is equivalent to the Python expression: o.keys(). */ #define PyMapping_Keys(O) PyObject_CallMethod(O,"keys",NULL) /* Implemented as macro: PyObject *PyMapping_Values(PyObject *o); On success, return a list of the values in object o. On failure, return NULL. This is equivalent to the Python expression: o.values(). */ #define PyMapping_Values(O) PyObject_CallMethod(O,"values",NULL) /* Implemented as macro: PyObject *PyMapping_Items(PyObject *o); On success, return a list of the items in object o, where each item is a tuple containing a key-value pair. On failure, return NULL. This is equivalent to the Python expression: o.items(). */ #define PyMapping_Items(O) PyObject_CallMethod(O,"items",NULL) PyAPI_FUNC(PyObject *) PyMapping_GetItemString(PyObject *o, char *key); /* Return element of o corresponding to the object, key, or NULL on failure. This is the equivalent of the Python expression: o[key]. */ PyAPI_FUNC(int) PyMapping_SetItemString(PyObject *o, char *key, PyObject *value); /* Map the object, key, to the value, v. Returns -1 on failure. This is the equivalent of the Python statement: o[key]=v. 
*/ PyAPI_FUNC(int) PyObject_IsInstance(PyObject *object, PyObject *typeorclass); /* isinstance(object, typeorclass) */ PyAPI_FUNC(int) PyObject_IsSubclass(PyObject *object, PyObject *typeorclass); /* issubclass(object, typeorclass) */ PyAPI_FUNC(int) _PyObject_RealIsInstance(PyObject *inst, PyObject *cls); PyAPI_FUNC(int) _PyObject_RealIsSubclass(PyObject *derived, PyObject *cls); /* For internal use by buffer API functions */ PyAPI_FUNC(void) _Py_add_one_to_index_F(int nd, Py_ssize_t *index, const Py_ssize_t *shape); PyAPI_FUNC(void) _Py_add_one_to_index_C(int nd, Py_ssize_t *index, const Py_ssize_t *shape); #ifdef __cplusplus } #endif #endif /* Py_ABSTRACTOBJECT_H */
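Taken together, the declarations above describe how C code can drive arbitrary Python objects through the abstract protocols without knowing their concrete types. The following is a small illustrative sketch (not part of abstract.h; the helper name `sum_sequence` and the reduced error handling are my own additions) combining the iterator protocol (PyObject_GetIter / PyIter_Next) with the number protocol (PyNumber_Add):

```c
/* Illustrative sketch only: roughly the C equivalent of sum(iterable),
 * written purely in terms of the abstract protocols declared above. */
#include <Python.h>

static PyObject *
sum_sequence(PyObject *iterable)
{
    /* PyObject_GetIter works for any iterable object. */
    PyObject *iter = PyObject_GetIter(iterable);
    if (iter == NULL)
        return NULL;

    PyObject *total = PyLong_FromLong(0);
    if (total == NULL) {
        Py_DECREF(iter);
        return NULL;
    }

    PyObject *item;
    while ((item = PyIter_Next(iter)) != NULL) {
        /* PyNumber_Add is the C spelling of "total + item". */
        PyObject *new_total = PyNumber_Add(total, item);
        Py_DECREF(item);
        Py_DECREF(total);
        if (new_total == NULL) {
            Py_DECREF(iter);
            return NULL;
        }
        total = new_total;
    }
    Py_DECREF(iter);

    /* PyIter_Next returns NULL both at exhaustion and on error; per the
     * comments above, an error is signalled by a pending exception. */
    if (PyErr_Occurred()) {
        Py_DECREF(total);
        return NULL;
    }
    return total;
}
```

This is only a usage sketch under the stated assumptions, not part of the header; it exists to show how the iterator and number protocols are meant to compose from C.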
17,300
2,542
// ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #pragma once namespace Management { namespace FaultAnalysisService { namespace TestCommandProgressState { enum Enum { Invalid = 0, Running = 1, RollingBack = 2, Completed = 3, Faulted = 4, Cancelled = 5, ForceCancelled = 6 }; void WriteToTextWriter(Common::TextWriter & w, Enum const & val); TestCommandProgressState::Enum FromPublicApi(FABRIC_TEST_COMMAND_PROGRESS_STATE const &); FABRIC_TEST_COMMAND_PROGRESS_STATE ToPublicApi(Enum const &); BEGIN_DECLARE_ENUM_JSON_SERIALIZER(Enum) ADD_ENUM_VALUE(Invalid) ADD_ENUM_VALUE(Running) ADD_ENUM_VALUE(RollingBack) ADD_ENUM_VALUE(Completed) ADD_ENUM_VALUE(Faulted) ADD_ENUM_VALUE(Cancelled) ADD_ENUM_VALUE(ForceCancelled) END_DECLARE_ENUM_SERIALIZER() }; } }
690
1,007
{ "name": "Second", "workflow": "second.py", "configuration": "config.py", "short_description": "Second test model", "long_description": "Second test model LOOONG textz", "author": "Dreadful VELES Team", "requires": ["veles>=0.5.0", "glymur"] }
111
530
/* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at http://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.openjdk.jmc.flightrecorder.configuration.internal; import java.util.HashMap; import java.util.Map; import org.openjdk.jmc.common.unit.IConstraint; import org.openjdk.jmc.common.unit.IDescribedMap; import org.openjdk.jmc.common.unit.IMutableConstrainedMap; import org.openjdk.jmc.common.unit.IOptionDescriptor; import org.openjdk.jmc.common.unit.MutableConstrainedMap; import org.openjdk.jmc.common.unit.UnitLookup; /** * General mutable {@link IOptionDescriptor} based {@link IDescribedMap} with possibility to add * constraints. * * @param <K> */ // FIXME: Rename to something more accurate. public class FixedDescriptorMap<K> extends MutableConstrainedMap<K> implements IDescribedMap<K> { private final IMapper<K, ? extends IOptionDescriptor<?>> mapper; protected final Map<K, IConstraint<?>> constraints; public FixedDescriptorMap(IMapper<K, ? extends IOptionDescriptor<?>> mapper) { this(mapper, new HashMap<K, Object>()); } protected FixedDescriptorMap(IMapper<K, ? extends IOptionDescriptor<?>> mapper, HashMap<K, Object> values) { this(mapper, values, new HashMap<K, IConstraint<?>>()); } protected FixedDescriptorMap(IMapper<K, ? 
extends IOptionDescriptor<?>> mapper, HashMap<K, Object> values, HashMap<K, IConstraint<?>> constraints) { super(values); this.mapper = mapper; this.constraints = constraints; } @Override public IMutableConstrainedMap<K> emptyWithSameConstraints() { return new FixedDescriptorMap<>(mapper, new HashMap<K, Object>(), new HashMap<>(constraints)); } @Override public IMutableConstrainedMap<K> mutableCopy() { return new FixedDescriptorMap<>(mapper, new HashMap<>(values), new HashMap<>(constraints)); } @Override public IConstraint<?> getConstraint(K key) { IOptionDescriptor<?> desc = mapper.get(key); return (desc != null) ? desc.getConstraint() : constraints.get(key); } @Override protected IConstraint<?> getSuggestedConstraint(K key) { IOptionDescriptor<?> desc = mapper.get(key); return (desc != null) ? null : UnitLookup.PLAIN_TEXT.getPersister(); } @Override public IOptionDescriptor<?> getDescribable(K key) { return mapper.get(key); } @Override protected void addConstraint(K key, IConstraint<?> constraint) { IOptionDescriptor<?> desc = mapper.get(key); if (desc != null) { throw new IllegalArgumentException("Key '" + key + "' is expressly prohibited in this map."); //$NON-NLS-1$ //$NON-NLS-2$ } constraints.put(key, constraint); } }
1,392
2,151
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/core/messaging/blink_transferable_message_struct_traits.h" #include "mojo/public/cpp/base/big_buffer_mojom_traits.h" #include "third_party/blink/renderer/core/imagebitmap/image_bitmap.h" #include "third_party/skia/include/core/SkBitmap.h" namespace mojo { namespace { scoped_refptr<blink::StaticBitmapImage> ToStaticBitmapImage( const SkBitmap& sk_bitmap) { auto handle = WTF::ArrayBufferContents::CreateDataHandle( sk_bitmap.computeByteSize(), WTF::ArrayBufferContents::kZeroInitialize); if (!handle) return nullptr; WTF::ArrayBufferContents array_buffer_contents( std::move(handle), WTF::ArrayBufferContents::kNotShared); if (!array_buffer_contents.Data()) return nullptr; SkImageInfo info = sk_bitmap.info(); if (!sk_bitmap.readPixels(info, array_buffer_contents.Data(), info.minRowBytes(), 0, 0, SkTransferFunctionBehavior::kIgnore)) return nullptr; return blink::StaticBitmapImage::Create(array_buffer_contents, info); } bool ToSkBitmap( const scoped_refptr<blink::StaticBitmapImage>& static_bitmap_image, SkBitmap& dest) { const sk_sp<SkImage> image = static_bitmap_image->PaintImageForCurrentFrame().GetSkImage(); return image && image->asLegacyBitmap( &dest, SkImage::LegacyBitmapMode::kRO_LegacyBitmapMode); } } // namespace Vector<SkBitmap> StructTraits<blink::mojom::blink::TransferableMessage::DataView, blink::BlinkTransferableMessage>:: image_bitmap_contents_array(const blink::BlinkCloneableMessage& input) { Vector<SkBitmap> out; out.ReserveInitialCapacity( input.message->GetImageBitmapContentsArray().size()); for (auto& bitmap_contents : input.message->GetImageBitmapContentsArray()) { SkBitmap bitmap; if (!ToSkBitmap(bitmap_contents, bitmap)) { return Vector<SkBitmap>(); } out.push_back(std::move(bitmap)); } return out; } bool StructTraits<blink::mojom::blink::TransferableMessage::DataView, blink::BlinkTransferableMessage>:: Read(blink::mojom::blink::TransferableMessage::DataView data, blink::BlinkTransferableMessage* out) { Vector<mojo::ScopedMessagePipeHandle> ports; blink::SerializedScriptValue::ArrayBufferContentsArray array_buffer_contents_array; Vector<SkBitmap> sk_bitmaps; if (!data.ReadMessage(static_cast<blink::BlinkCloneableMessage*>(out)) || !data.ReadArrayBufferContentsArray(&array_buffer_contents_array) || !data.ReadImageBitmapContentsArray(&sk_bitmaps) || !data.ReadPorts(&ports)) { return false; } out->ports.ReserveInitialCapacity(ports.size()); out->ports.AppendRange(std::make_move_iterator(ports.begin()), std::make_move_iterator(ports.end())); out->has_user_gesture = data.has_user_gesture(); out->message->SetArrayBufferContentsArray( std::move(array_buffer_contents_array)); array_buffer_contents_array.clear(); // Bitmaps are serialized in mojo as SkBitmaps to leverage existing // serialization logic, but SerializedScriptValue uses StaticBitmapImage, so // the SkBitmaps need to be converted to StaticBitmapImages. 
blink::SerializedScriptValue::ImageBitmapContentsArray image_bitmap_contents_array; for (auto& sk_bitmap : sk_bitmaps) { const scoped_refptr<blink::StaticBitmapImage> bitmap_contents = ToStaticBitmapImage(sk_bitmap); if (!bitmap_contents) { return false; } image_bitmap_contents_array.push_back(bitmap_contents); } out->message->SetImageBitmapContentsArray(image_bitmap_contents_array); return true; } bool StructTraits<blink::mojom::blink::SerializedArrayBufferContents::DataView, WTF::ArrayBufferContents>:: Read(blink::mojom::blink::SerializedArrayBufferContents::DataView data, WTF::ArrayBufferContents* out) { mojo_base::BigBufferView contents_view; if (!data.ReadContents(&contents_view)) return false; auto contents_data = contents_view.data(); auto handle = WTF::ArrayBufferContents::CreateDataHandle( contents_data.size(), WTF::ArrayBufferContents::kZeroInitialize); if (!handle) return false; WTF::ArrayBufferContents array_buffer_contents( std::move(handle), WTF::ArrayBufferContents::kNotShared); memcpy(array_buffer_contents.Data(), contents_data.data(), contents_data.size()); *out = std::move(array_buffer_contents); return true; } } // namespace mojo
1,754
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ash/policy/reporting/metrics_reporting/network/network_info_sampler.h" #include <string> #include <utility> #include "ash/constants/ash_features.h" #include "base/run_loop.h" #include "base/test/bind.h" #include "base/test/scoped_feature_list.h" #include "base/test/task_environment.h" #include "base/values.h" #include "chromeos/dbus/hermes/hermes_manager_client.h" #include "chromeos/dbus/shill/shill_device_client.h" #include "chromeos/network/network_handler.h" #include "chromeos/network/network_handler_test_helper.h" #include "chromeos/network/network_state_handler.h" #include "dbus/object_path.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/cros_system_api/dbus/shill/dbus-constants.h" namespace reporting { namespace { constexpr char kEid0[] = "1234"; constexpr char kEid1[] = "5678"; constexpr char kEthernetPath[] = "ethernet/path"; constexpr char kEthernetMac[] = "ethernet_mac"; constexpr char kWifiPath[] = "wifi/path"; constexpr char kWifiMac[] = "wifi_mac"; constexpr char kCellularPath[] = "cellular/path"; constexpr char kMeid[] = "12343"; constexpr char kImei[] = "5689"; constexpr char kIccid[] = "9876563"; constexpr char kMdn[] = "134345"; class NetworkInfoSamplerTest : public ::testing::Test { protected: NetworkInfoSamplerTest() = default; NetworkInfoSamplerTest(const NetworkInfoSamplerTest&) = delete; NetworkInfoSamplerTest& operator=(const NetworkInfoSamplerTest&) = delete; ~NetworkInfoSamplerTest() override = default; void SetUp() override { device_client_ = network_handler_test_helper_.device_test(); device_client_->ClearDevices(); } ::ash::ShillDeviceClient::TestInterface* device_client_; private: base::test::TaskEnvironment task_environment_; ::ash::NetworkHandlerTestHelper network_handler_test_helper_; }; TEST_F(NetworkInfoSamplerTest, AllTypes) { base::test::ScopedFeatureList scoped_feature_list; scoped_feature_list.InitAndEnableFeature(ash::features::kESimPolicy); ::chromeos::HermesManagerClient::Get()->GetTestInterface()->AddEuicc( dbus::ObjectPath("path0"), kEid0, true, 1); ::chromeos::HermesManagerClient::Get()->GetTestInterface()->AddEuicc( dbus::ObjectPath("path1"), kEid1, true, 2); device_client_->AddDevice(kEthernetPath, shill::kTypeEthernet, "ethernet"); device_client_->SetDeviceProperty(kEthernetPath, shill::kAddressProperty, base::Value(kEthernetMac), /*notify_changed=*/true); device_client_->AddDevice(kWifiPath, shill::kTypeWifi, "wifi"); device_client_->SetDeviceProperty(kWifiPath, shill::kAddressProperty, base::Value(kWifiMac), /*notify_changed=*/true); device_client_->AddDevice(kCellularPath, shill::kTypeCellular, "cellular"); device_client_->SetDeviceProperty(kCellularPath, shill::kMeidProperty, base::Value(kMeid), /*notify_changed=*/true); device_client_->SetDeviceProperty(kCellularPath, shill::kImeiProperty, base::Value(kImei), /*notify_changed=*/true); device_client_->SetDeviceProperty(kCellularPath, shill::kIccidProperty, base::Value(kIccid), /*notify_changed=*/true); device_client_->SetDeviceProperty(kCellularPath, shill::kMdnProperty, base::Value(kMdn), /*notify_changed=*/true); base::RunLoop().RunUntilIdle(); MetricData result; NetworkInfoSampler sampler; sampler.Collect(base::BindLambdaForTesting( [&](MetricData metric_data) { result = std::move(metric_data); })); ASSERT_TRUE(result.has_info_data()); 
ASSERT_TRUE(result.info_data().has_networks_info()); ASSERT_EQ(result.info_data().networks_info().network_interfaces_size(), 3); // Ethernet. EXPECT_EQ(result.info_data().networks_info().network_interfaces(0).type(), NetworkDeviceType::ETHERNET_DEVICE); EXPECT_EQ( result.info_data().networks_info().network_interfaces(0).mac_address(), kEthernetMac); EXPECT_EQ( result.info_data().networks_info().network_interfaces(0).device_path(), kEthernetPath); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_meid()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_imei()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_iccid()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_mdn()); EXPECT_TRUE( result.info_data().networks_info().network_interfaces(0).eids().empty()); // Wifi. EXPECT_EQ(result.info_data().networks_info().network_interfaces(1).type(), NetworkDeviceType::WIFI_DEVICE); EXPECT_EQ( result.info_data().networks_info().network_interfaces(1).mac_address(), kWifiMac); EXPECT_EQ( result.info_data().networks_info().network_interfaces(1).device_path(), kWifiPath); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(1).has_meid()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(1).has_imei()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(1).has_iccid()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(1).has_mdn()); EXPECT_TRUE( result.info_data().networks_info().network_interfaces(1).eids().empty()); // Cellular. EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).type(), NetworkDeviceType::CELLULAR_DEVICE); EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).meid(), kMeid); EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).imei(), kImei); EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).iccid(), kIccid); EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).mdn(), kMdn); EXPECT_EQ( result.info_data().networks_info().network_interfaces(2).device_path(), kCellularPath); EXPECT_FALSE(result.info_data() .networks_info() .network_interfaces(2) .has_mac_address()); ASSERT_EQ( result.info_data().networks_info().network_interfaces(2).eids_size(), 2); EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).eids(0), kEid0); EXPECT_EQ(result.info_data().networks_info().network_interfaces(2).eids(1), kEid1); } TEST_F(NetworkInfoSamplerTest, Cellular_ESimPolicyDisabled) { base::test::ScopedFeatureList scoped_feature_list; scoped_feature_list.InitAndDisableFeature(ash::features::kESimPolicy); ::chromeos::HermesManagerClient::Get()->GetTestInterface()->AddEuicc( dbus::ObjectPath("path1"), kEid0, true, 1); device_client_->AddDevice(kCellularPath, shill::kTypeCellular, "cellular"); device_client_->SetDeviceProperty(kCellularPath, shill::kMeidProperty, base::Value(kMeid), /*notify_changed=*/true); base::RunLoop().RunUntilIdle(); MetricData result; NetworkInfoSampler sampler; sampler.Collect(base::BindLambdaForTesting( [&](MetricData metric_data) { result = std::move(metric_data); })); ASSERT_TRUE(result.has_info_data()); ASSERT_TRUE(result.info_data().has_networks_info()); ASSERT_EQ(result.info_data().networks_info().network_interfaces_size(), 1); EXPECT_EQ(result.info_data().networks_info().network_interfaces(0).type(), NetworkDeviceType::CELLULAR_DEVICE); EXPECT_EQ(result.info_data().networks_info().network_interfaces(0).meid(), kMeid); 
EXPECT_EQ( result.info_data().networks_info().network_interfaces(0).device_path(), kCellularPath); EXPECT_FALSE(result.info_data() .networks_info() .network_interfaces(0) .has_mac_address()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_imei()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_iccid()); EXPECT_FALSE( result.info_data().networks_info().network_interfaces(0).has_mdn()); // No eid reported, feature is disabled. EXPECT_TRUE( result.info_data().networks_info().network_interfaces(0).eids().empty()); } } // namespace } // namespace reporting
3,721
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ash/input_method/multi_word_suggester.h" #include <cmath> #include "ash/services/ime/public/cpp/suggestions.h" #include "base/metrics/histogram_functions.h" #include "base/strings/string_util.h" #include "base/strings/utf_string_conversions.h" #include "chrome/browser/ash/input_method/ui/suggestion_details.h" #include "ui/events/keycodes/dom/dom_code.h" namespace ash { namespace input_method { namespace { using ::chromeos::ime::TextSuggestion; using ::chromeos::ime::TextSuggestionMode; using ::chromeos::ime::TextSuggestionType; constexpr char16_t kSuggestionShownMessage[] = u"predictive writing candidate shown, press tab to accept"; constexpr char16_t kSuggestionAcceptedMessage[] = u"predictive writing candidate inserted"; constexpr char16_t kSuggestionDismissedMessage[] = u"predictive writing candidate dismissed"; absl::optional<TextSuggestion> GetMultiWordSuggestion( const std::vector<TextSuggestion>& suggestions) { if (suggestions.empty()) return absl::nullopt; if (suggestions[0].type == TextSuggestionType::kMultiWord) { // There should only ever be one multi word suggestion given at a time. DCHECK_EQ(suggestions.size(), 1); return suggestions[0]; } return absl::nullopt; } size_t CalculateConfirmedLength(const std::u16string& surrounding_text, const std::u16string& suggestion_text) { if (surrounding_text.empty() || suggestion_text.empty()) return 0; for (size_t i = suggestion_text.length(); i >= 1; i--) { if (base::EndsWith(surrounding_text, suggestion_text.substr(0, i))) { return i; } } return 0; } void RecordTimeToAccept(base::TimeDelta delta) { base::UmaHistogramTimes("InputMethod.Assistive.TimeToAccept.MultiWord", delta); } void RecordTimeToDismiss(base::TimeDelta delta) { base::UmaHistogramTimes("InputMethod.Assistive.TimeToDismiss.MultiWord", delta); } // TODO(crbug/1146266): Add DismissedAccuracy metric back in.
} // namespace MultiWordSuggester::MultiWordSuggester( SuggestionHandlerInterface* suggestion_handler) : suggestion_handler_(suggestion_handler), state_(this) { suggestion_button_.id = ui::ime::ButtonId::kSuggestion; suggestion_button_.window_type = ui::ime::AssistiveWindowType::kMultiWordSuggestion; suggestion_button_.index = 0; } MultiWordSuggester::~MultiWordSuggester() = default; void MultiWordSuggester::OnFocus(int context_id) { focused_context_id_ = context_id; state_.ResetSuggestion(); } void MultiWordSuggester::OnBlur() { focused_context_id_ = 0; state_.ResetSuggestion(); } void MultiWordSuggester::OnSurroundingTextChanged(const std::u16string& text, size_t cursor_pos, size_t anchor_pos) { auto surrounding_text = SuggestionState::SurroundingText{ .text = text, .cursor_at_end_of_text = (cursor_pos == anchor_pos && cursor_pos == text.length())}; state_.UpdateSurroundingText(surrounding_text); DisplaySuggestionIfAvailable(); } void MultiWordSuggester::OnExternalSuggestionsUpdated( const std::vector<TextSuggestion>& suggestions) { if (state_.IsSuggestionShowing() || !state_.IsCursorAtEndOfText()) return; absl::optional<TextSuggestion> multi_word_suggestion = GetMultiWordSuggestion(suggestions); if (!multi_word_suggestion) { state_.UpdateState(SuggestionState::State::kNoSuggestionShown); return; } auto suggestion = SuggestionState::Suggestion{ .mode = multi_word_suggestion->mode, .text = base::UTF8ToUTF16(multi_word_suggestion->text), .time_first_shown = base::TimeTicks::Now()}; state_.UpdateSuggestion(suggestion); DisplaySuggestionIfAvailable(); } SuggestionStatus MultiWordSuggester::HandleKeyEvent(const ui::KeyEvent& event) { if (!state_.IsSuggestionShowing()) return SuggestionStatus::kNotHandled; switch (event.code()) { case ui::DomCode::TAB: AcceptSuggestion(); return SuggestionStatus::kAccept; case ui::DomCode::ARROW_DOWN: if (state_.IsSuggestionHighlighted()) return SuggestionStatus::kNotHandled; state_.ToggleSuggestionHighlight(); SetSuggestionHighlight(true); return SuggestionStatus::kBrowsing; case ui::DomCode::ARROW_UP: if (!state_.IsSuggestionHighlighted()) return SuggestionStatus::kNotHandled; state_.ToggleSuggestionHighlight(); SetSuggestionHighlight(false); return SuggestionStatus::kBrowsing; case ui::DomCode::ENTER: if (!state_.IsSuggestionHighlighted()) return SuggestionStatus::kNotHandled; AcceptSuggestion(); return SuggestionStatus::kAccept; default: return SuggestionStatus::kNotHandled; } } bool MultiWordSuggester::Suggest(const std::u16string& text, size_t cursor_pos, size_t anchor_pos) { return state_.IsSuggestionShowing(); } bool MultiWordSuggester::AcceptSuggestion(size_t index) { std::string error; suggestion_handler_->AcceptSuggestion(focused_context_id_, &error); if (!error.empty()) { LOG(ERROR) << "suggest: failed to accept suggestion - " << error; return false; } auto suggestion = state_.GetSuggestion(); if (suggestion) { RecordTimeToAccept(base::TimeTicks::Now() - suggestion->time_first_shown); } state_.UpdateState(SuggestionState::State::kSuggestionAccepted); state_.ResetSuggestion(); return true; } void MultiWordSuggester::DismissSuggestion() { std::string error; suggestion_handler_->DismissSuggestion(focused_context_id_, &error); if (!error.empty()) { LOG(ERROR) << "suggest: Failed to dismiss suggestion - " << error; return; } auto suggestion = state_.GetSuggestion(); if (suggestion) { RecordTimeToDismiss(base::TimeTicks::Now() - suggestion->time_first_shown); } state_.UpdateState(SuggestionState::State::kSuggestionDismissed); state_.ResetSuggestion(); } 
AssistiveType MultiWordSuggester::GetProposeActionType() { return state_.GetLastSuggestionType(); } bool MultiWordSuggester::HasSuggestions() { return false; } std::vector<TextSuggestion> MultiWordSuggester::GetSuggestions() { return {}; } void MultiWordSuggester::DisplaySuggestionIfAvailable() { auto suggestion_to_display = state_.GetSuggestion(); if (suggestion_to_display.has_value()) DisplaySuggestion(*suggestion_to_display); } void MultiWordSuggester::DisplaySuggestion( const SuggestionState::Suggestion& suggestion) { ui::ime::SuggestionDetails details; details.text = suggestion.text; details.show_accept_annotation = false; details.show_quick_accept_annotation = true; details.confirmed_length = suggestion.confirmed_length; details.show_setting_link = false; std::string error; suggestion_handler_->SetSuggestion(focused_context_id_, details, &error); if (!error.empty()) { LOG(ERROR) << "suggest: Failed to show suggestion in assistive framework" << " - " << error; } } void MultiWordSuggester::SetSuggestionHighlight(bool highlighted) { std::string error; suggestion_handler_->SetButtonHighlighted( focused_context_id_, suggestion_button_, highlighted, &error); if (!error.empty()) { LOG(ERROR) << "Failed to set button highlighted. " << error; } } void MultiWordSuggester::Announce(const std::u16string& message) { if (suggestion_handler_) { suggestion_handler_->Announce(message); } } MultiWordSuggester::SuggestionState::SuggestionState( MultiWordSuggester* suggester) : suggester_(suggester) {} MultiWordSuggester::SuggestionState::~SuggestionState() = default; void MultiWordSuggester::SuggestionState::UpdateState(const State& state) { if (state == State::kPredictionSuggestionShown) { last_suggestion_type_ = AssistiveType::kMultiWordPrediction; } if (state == State::kCompletionSuggestionShown) { last_suggestion_type_ = AssistiveType::kMultiWordCompletion; } if (state_ == State::kNoSuggestionShown && (state == State::kPredictionSuggestionShown || state == State::kCompletionSuggestionShown)) { suggester_->Announce(kSuggestionShownMessage); } if ((state_ == State::kPredictionSuggestionShown || state_ == State::kCompletionSuggestionShown || state_ == State::kTrackingLastSuggestionShown) && state == State::kSuggestionAccepted) { suggester_->Announce(kSuggestionAcceptedMessage); } if ((state_ == State::kPredictionSuggestionShown || state_ == State::kCompletionSuggestionShown || state_ == State::kTrackingLastSuggestionShown) && state == State::kSuggestionDismissed) { suggester_->Announce(kSuggestionDismissedMessage); } state_ = state; } void MultiWordSuggester::SuggestionState::UpdateSurroundingText( const MultiWordSuggester::SuggestionState::SurroundingText& surrounding_text) { surrounding_text_ = surrounding_text; ReconcileSuggestionWithText(); } void MultiWordSuggester::SuggestionState::UpdateSuggestion( const MultiWordSuggester::SuggestionState::Suggestion& suggestion) { suggestion_ = suggestion; UpdateState(suggestion.mode == TextSuggestionMode::kCompletion ? State::kCompletionSuggestionShown : State::kPredictionSuggestionShown); if (suggestion.mode == TextSuggestionMode::kCompletion) ReconcileSuggestionWithText(); } void MultiWordSuggester::SuggestionState::ReconcileSuggestionWithText() { if (!suggestion_) return; size_t new_confirmed_length = CalculateConfirmedLength(surrounding_text_.text, suggestion_->text); // Save the calculated confirmed length on first showing of a completion // suggestion. This will be used later when determining if a suggestion // should be dismissed or not. 
auto initial_confirmed_length = state_ == State::kCompletionSuggestionShown ? new_confirmed_length : suggestion_->initial_confirmed_length; // Are we still tracking the last suggestion shown to the user? bool no_longer_tracking = state_ == State::kTrackingLastSuggestionShown && (new_confirmed_length == 0 || new_confirmed_length < suggestion_->initial_confirmed_length); if (no_longer_tracking || !surrounding_text_.cursor_at_end_of_text) { UpdateState(State::kSuggestionDismissed); ResetSuggestion(); return; } if (state_ == State::kPredictionSuggestionShown || state_ == State::kCompletionSuggestionShown) { UpdateState(State::kTrackingLastSuggestionShown); } suggestion_ = Suggestion{.text = suggestion_->text, .confirmed_length = new_confirmed_length, .initial_confirmed_length = initial_confirmed_length, .time_first_shown = suggestion_->time_first_shown}; } void MultiWordSuggester::SuggestionState::ToggleSuggestionHighlight() { if (!suggestion_) return; suggestion_->highlighted = !suggestion_->highlighted; } bool MultiWordSuggester::SuggestionState::IsSuggestionHighlighted() { if (!suggestion_) return false; return suggestion_->highlighted; } bool MultiWordSuggester::SuggestionState::IsSuggestionShowing() { return (state_ == State::kPredictionSuggestionShown || state_ == State::kCompletionSuggestionShown || state_ == State::kTrackingLastSuggestionShown); } bool MultiWordSuggester::SuggestionState::IsCursorAtEndOfText() { return surrounding_text_.cursor_at_end_of_text; } absl::optional<MultiWordSuggester::SuggestionState::Suggestion> MultiWordSuggester::SuggestionState::GetSuggestion() { return suggestion_; } void MultiWordSuggester::SuggestionState::ResetSuggestion() { suggestion_ = absl::nullopt; UpdateState(State::kNoSuggestionShown); } AssistiveType MultiWordSuggester::SuggestionState::GetLastSuggestionType() { return last_suggestion_type_; } } // namespace input_method } // namespace ash
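The confirmed-length bookkeeping above is the core of how the suggester keeps a completion candidate on screen while the user keeps typing: the confirmed length is the longest prefix of the suggestion that the surrounding text already ends with. A standalone sketch of that same overlap rule (plain C++, no Chromium dependencies; the helper names and sample strings are illustrative only, not Chromium code):

```cpp
// Illustrative re-statement of the overlap rule used by CalculateConfirmedLength.
#include <cassert>
#include <string>

namespace {

bool EndsWith(const std::u16string& text, const std::u16string& suffix) {
  return text.size() >= suffix.size() &&
         text.compare(text.size() - suffix.size(), suffix.size(), suffix) == 0;
}

size_t ConfirmedLength(const std::u16string& surrounding_text,
                       const std::u16string& suggestion_text) {
  if (surrounding_text.empty() || suggestion_text.empty())
    return 0;
  // Try the longest prefix of the suggestion first, then shrink.
  for (size_t i = suggestion_text.length(); i >= 1; i--) {
    if (EndsWith(surrounding_text, suggestion_text.substr(0, i)))
      return i;
  }
  return 0;
}

}  // namespace

int main() {
  // Completion case: suggestion "how are you" while the user has typed
  // "hey, how ar" -- the last 6 characters ("how ar") match a prefix.
  assert(ConfirmedLength(u"hey, how ar", u"how are you") == 6u);
  // Prediction case: nothing of the suggestion has been typed yet.
  assert(ConfirmedLength(u"hey, ", u"how are you") == 0u);
  return 0;
}
```

In the real class, a drop in this overlap (or the cursor leaving the end of the text) is what triggers dismissal of a tracked suggestion.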
4,479
2,479
# -*- coding: utf-8 -*- """ Client Plaintext Upgrade ~~~~~~~~~~~~~~~~~~~~~~~~ This example code fragment demonstrates how to set up an HTTP/2 client that uses the plaintext HTTP Upgrade mechanism to negotiate HTTP/2 connectivity. For maximum explanatory value it uses the synchronous socket API that comes with the Python standard library. In production code you will want to use an actual HTTP/1.1 client if possible. This code requires Python 3.5 or later. """ import h2.connection import socket def establish_tcp_connection(): """ This function establishes a client-side TCP connection. How it works isn't very important to this example. For the purpose of this example we connect to localhost. """ return socket.create_connection(('localhost', 80)) def send_initial_request(connection, settings): """ For the sake of this upgrade demonstration, we're going to issue a GET request against the root of the site. In principle the best request to issue for an upgrade is actually ``OPTIONS *``, but this is remarkably poorly supported and can break in weird ways. """ # Craft our initial request per RFC 7540 Section 3.2. This requires two # special header fields: the Upgrade header, and the HTTP2-Settings header. # The value of the HTTP2-Settings header field comes from h2. request = ( b"GET / HTTP/1.1\r\n" + b"Host: localhost\r\n" + b"Upgrade: h2c\r\n" + b"HTTP2-Settings: " + settings + b"\r\n" + b"\r\n" ) connection.sendall(request) def get_upgrade_response(connection): """ This function reads from the socket until the HTTP/1.1 end-of-headers sequence (CRLFCRLF) is received. It then checks what the status code of the response is. This is not a substitute for proper HTTP/1.1 parsing, but it's good enough for example purposes. """ data = b'' while b'\r\n\r\n' not in data: data += connection.recv(8192) headers, rest = data.split(b'\r\n\r\n', 1) # An upgrade response begins HTTP/1.1 101 Switching Protocols. Look for the # code. In production code you should also check that the upgrade is to # h2c, but here we know we only offered one upgrade so there's only one # possible upgrade in use. split_headers = headers.split() if split_headers[1] != b'101': raise RuntimeError("Not upgrading!") # We don't care about the HTTP/1.1 data anymore, but we do care about # any other data we read from the socket: this is going to be HTTP/2 data # that must be passed to the H2Connection. return rest def main(): """ The client upgrade flow. """ # Step 1: Establish the TCP connection. connection = establish_tcp_connection() # Step 2: Create H2 Connection object, put it in upgrade mode, and get the # value of the HTTP2-Settings header we want to use. h2_connection = h2.connection.H2Connection() settings_header_value = h2_connection.initiate_upgrade_connection() # Step 3: Send the initial HTTP/1.1 request with the upgrade fields. send_initial_request(connection, settings_header_value) # Step 4: Read the HTTP/1.1 response, look for 101 response. extra_data = get_upgrade_response(connection) # Step 5: Immediately send the pending HTTP/2 data. connection.sendall(h2_connection.data_to_send()) # Step 6: Feed the body data to the H2 connection. events = h2_connection.receive_data(extra_data) # Now you can enter your main loop, beginning by processing the first set # of events above. These events may include ResponseReceived, which will # contain the response to the request we made in Step 3. main_loop(events)
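The example ends by handing the first batch of events to main_loop, which it never defines. Below is a minimal sketch of what such a loop might look like. The event classes (ResponseReceived, DataReceived, StreamEnded) and the connection methods used are standard h2 APIs, but the loop structure and the extra arguments are my own assumptions made so the sketch is self-contained; the original only passes ``events``.

```python
import h2.events


def main_loop(initial_events, connection, h2_connection):
    """
    Illustrative only: drain events until the stream we opened completes.
    ``connection`` is the raw socket and ``h2_connection`` the H2Connection;
    passing them in here is an assumption, not part of the original example.
    """
    events = list(initial_events)
    stream_done = False
    while not stream_done:
        for event in events:
            if isinstance(event, h2.events.ResponseReceived):
                print("Response headers:", event.headers)
            elif isinstance(event, h2.events.DataReceived):
                print("Body chunk:", event.data)
                # Return the flow-control window so the server can keep sending.
                h2_connection.acknowledge_received_data(
                    event.flow_controlled_length, event.stream_id
                )
            elif isinstance(event, h2.events.StreamEnded):
                stream_done = True
        # Push any frames h2 queued (e.g. WINDOW_UPDATE), then read more data.
        connection.sendall(h2_connection.data_to_send())
        if not stream_done:
            events = h2_connection.receive_data(connection.recv(65535))
```

A real client would also handle ConnectionTerminated and socket errors; this sketch only shows the happy path for a single stream.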
1,200
668
package com.tal.wangxiao.conan.common.service.impl; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.tal.wangxiao.conan.common.mapper.DiffDetailMapper; import com.tal.wangxiao.conan.common.domain.DiffDetail; import com.tal.wangxiao.conan.common.service.DiffDetailService; /** * Service business-layer implementation for the diff interface relation table * * @author dengkunan * @date 2021-01-08 */ @Service public class DiffDetailServiceImpl implements DiffDetailService { @Autowired private DiffDetailMapper diffDetailMapper; /** * Query a diff interface relation record * * @param id diff interface relation record ID * @return diff interface relation record */ @Override public DiffDetail selectDiffDetailById(Integer id) { return diffDetailMapper.selectDiffDetailById(id); } /** * Query the list of diff interface relation records * * @param diffDetail diff interface relation record * @return diff interface relation records */ @Override public List<DiffDetail> selectDiffDetailList(DiffDetail diffDetail) { return diffDetailMapper.selectDiffDetailList(diffDetail); } /** * Add a diff interface relation record * * @param diffDetail diff interface relation record * @return result */ @Override public int insertDiffDetail(DiffDetail diffDetail) { return diffDetailMapper.insertDiffDetail(diffDetail); } /** * Update a diff interface relation record * * @param diffDetail diff interface relation record * @return result */ @Override public int updateDiffDetail(DiffDetail diffDetail) { return diffDetailMapper.updateDiffDetail(diffDetail); } /** * Batch delete diff interface relation records * * @param ids IDs of the diff interface relation records to delete * @return result */ @Override public int deleteDiffDetailByIds(Integer[] ids) { return diffDetailMapper.deleteDiffDetailByIds(ids); } /** * Delete a diff interface relation record * * @param id diff interface relation record ID * @return result */ @Override public int deleteDiffDetailById(Integer id) { return diffDetailMapper.deleteDiffDetailById(id); } }
1,120
1,025
//================================================================================== // Copyright (c) 2016 , Advanced Micro Devices, Inc. All rights reserved. // /// \author AMD Developer Tools Team /// \file gdSaveProjectCommand.h /// //================================================================================== //------------------------------ gdSaveProjectCommand.h ------------------------------ #ifndef __GDSAVEPROJECTCOMMAND #define __GDSAVEPROJECTCOMMAND // Infra: #include <AMDTBaseTools/Include/gtString.h> #include <AMDTOSWrappers/Include/osDirectory.h> #include <AMDTOSWrappers/Include/osFilePath.h> #include <AMDTAPIClasses/Include/apCounterScope.h> // AMDTApplicationFramework: #include <AMDTApplicationFramework/Include/afCommand.h> // ---------------------------------------------------------------------------------- // Class Name: gdSaveProjectCommand : public afCommand // General Description: // Save project Command - Saves the current configuration into a .gdb file. // Author: <NAME> // Creation Date: 6/11/2003 // ---------------------------------------------------------------------------------- class GD_API gdSaveProjectCommand : public afCommand { public: gdSaveProjectCommand(); virtual ~gdSaveProjectCommand(); // Overrides afCommand: virtual bool canExecuteSpecificCommand(); virtual bool executeSpecificCommand(); bool getXMLOutputString(gtString& xmlOutputString); private: bool GetProjectSettings(); bool initXMLDebuggerTextStructure(TiXmlDocument& xmlDoc); bool setDebuggerData(TiXmlHandle& docHandle, const gtString& dataFieldName, const gtString& dataFieldValue); bool setDebugOutputData(TiXmlHandle& docHandle, const gtString& dataFieldName, const gtString& dataFieldValue); bool saveBreakpointsData(TiXmlHandle& docHandle); bool saveStateVariablesData(TiXmlHandle& docHandle); bool writeStringToXml(TiXmlNode* pXmlNode, const gtString& dataFieldValue); bool saveDebugSettingsAsString(); bool addBreakpointProperty(TiXmlNode* pBreakpointNode, const gtString& breakpointPropertyNode, const gtString& dataFieldValue); bool setBreakpointData(TiXmlNode* pBreakpointsXMLNode, gtString breakpointTypeStr, gtString breakpointStringValue, bool isBreakpointEnabled, int lineNumber = -1); bool setOpenGLStateVariableData(TiXmlNode* pStateVariablesNode, const gtString& functionName); private: // The project settings for save: apDebugProjectSettings m_projectSettings; // OpenGL debug output: gtString _glDebugOutputLoggingEnabledString; gtString _glDebugOutputBreakOnReportsString; gtString _debugOutputMessagesMaskString; gtString _debugOutputSeverityString; // Contain the XML output as string: gtString m_xmlOutputString; }; #endif // __GDSAVEPROJECTCOMMAND
849
304
<reponame>NoahFetz/CloudNet-v3 /* * Copyright 2019-2021 CloudNetService team & contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.dytanic.cloudnet.common.logging; import org.jetbrains.annotations.NotNull; /** * A LogHandler should handle an incoming LogEntry. The Operation can be execute asynchronously or synchronously. It * depends on the implementation of the logger and the setting of the LogLevel */ public interface ILogHandler extends AutoCloseable { /** * Allows to handle this incoming LogEntry from the logger This method can invoked asynchronously or synchronously. It * depends on the implementation of the logger and the setting of the LogLevel * * @param logEntry the new incoming log entry */ void handle(@NotNull LogEntry logEntry) throws Exception; }
351
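The ILogHandler contract above is small: receive a LogEntry (possibly asynchronously) and release resources on close. A minimal sketch of an implementation, assuming only the interface surface shown in the sample, might be:

import de.dytanic.cloudnet.common.logging.ILogHandler;
import de.dytanic.cloudnet.common.logging.LogEntry;
import org.jetbrains.annotations.NotNull;

// Minimal console handler sketch. Printing the entry via toString() is a
// placeholder; a real handler would read the entry's level and message fields,
// whose accessor names are not shown in the sample above.
public final class ConsoleLogHandler implements ILogHandler {

    @Override
    public void handle(@NotNull LogEntry logEntry) {
        System.out.println(logEntry);
    }

    @Override
    public void close() {
        // Nothing to release for a console handler.
    }
}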
944
import unittest import trtorch import torch import torchvision.models as models from model_test_case import ModelTestCase class TestMultiGpuSwitching(ModelTestCase): def setUp(self): if torch.cuda.device_count() < 2: self.fail("Test is not relevant for this platform since number of available CUDA devices is less than 2") trtorch.set_device(0) self.target_gpu = 1 self.input = torch.randn((1, 3, 224, 224)).to("cuda:1") self.model = self.model.to("cuda:1") self.traced_model = torch.jit.trace(self.model, [self.input]) self.scripted_model = torch.jit.script(self.model) def test_compile_traced(self): trtorch.set_device(0) compile_spec = { "inputs": [trtorch.Input(self.input.shape)], "device": { "device_type": trtorch.DeviceType.GPU, "gpu_id": self.target_gpu, "dla_core": 0, "allow_gpu_fallback": False, "disable_tf32": False } } trt_mod = trtorch.compile(self.traced_model, **compile_spec) trtorch.set_device(self.target_gpu) same = (trt_mod(self.input) - self.traced_model(self.input)).abs().max() trtorch.set_device(0) self.assertTrue(same < 2e-3) def test_compile_script(self): trtorch.set_device(0) compile_spec = { "inputs": [trtorch.Input(self.input.shape)], "device": { "device_type": trtorch.DeviceType.GPU, "gpu_id": self.target_gpu, "dla_core": 0, "allow_gpu_fallback": False, "disable_tf32": False } } trt_mod = trtorch.compile(self.scripted_model, **compile_spec) trtorch.set_device(self.target_gpu) same = (trt_mod(self.input) - self.scripted_model(self.input)).abs().max() trtorch.set_device(0) self.assertTrue(same < 2e-3) class TestMultiGpuSerializeDeserializeSwitching(ModelTestCase): def setUp(self): if torch.cuda.device_count() < 2: self.fail("Test is not relevant for this platform since number of available CUDA devices is less than 2") self.target_gpu = 0 trtorch.set_device(0) self.input = torch.randn((1, 3, 224, 224)).to("cuda:0") self.model = self.model.to("cuda:0") self.traced_model = torch.jit.trace(self.model, [self.input]) self.scripted_model = torch.jit.script(self.model) def test_compile_traced(self): trtorch.set_device(0) compile_spec = { "inputs": [trtorch.Input(self.input.shape)], "device": { "device_type": trtorch.DeviceType.GPU, "gpu_id": self.target_gpu, "dla_core": 0, "allow_gpu_fallback": False, "disable_tf32": False } } trt_mod = trtorch.compile(self.traced_model, **compile_spec) # Changing the device ID deliberately. It should still run on correct device ID by context switching trtorch.set_device(1) same = (trt_mod(self.input) - self.traced_model(self.input)).abs().max() self.assertTrue(same < 2e-3) def test_compile_script(self): trtorch.set_device(0) compile_spec = { "inputs": [trtorch.Input(self.input.shape)], "device": { "device_type": trtorch.DeviceType.GPU, "gpu_id": self.target_gpu, "dla_core": 0, "allow_gpu_fallback": False, "disable_tf32": False } } trt_mod = trtorch.compile(self.scripted_model, **compile_spec) # Changing the device ID deliberately. It should still run on correct device ID by context switching trtorch.set_device(1) same = (trt_mod(self.input) - self.scripted_model(self.input)).abs().max() self.assertTrue(same < 2e-3) def test_suite(): suite = unittest.TestSuite() suite.addTest(TestMultiGpuSwitching.parametrize(TestMultiGpuSwitching, model=models.resnet18(pretrained=True))) suite.addTest( TestMultiGpuSerializeDeserializeSwitching.parametrize(TestMultiGpuSwitching, model=models.resnet18(pretrained=True))) return suite suite = test_suite() runner = unittest.TextTestRunner() result = runner.run(suite) exit(int(not result.wasSuccessful()))
2,213
577
#! /usr/bin/env python3 # -*- coding: utf-8 -*- """Test for frame stacking.""" import importlib import math import numpy as np import pytest from neural_sp.models.torch_utils import np2tensor from neural_sp.models.torch_utils import pad_list def make_args(**kwargs): args = dict( n_stacks=1, n_skips=1, ) args.update(kwargs) return args @pytest.mark.parametrize( "args", [ ({'n_stacks': 1, 'n_skips': 1}), ({'n_stacks': 2, 'n_skips': 2}), ({'n_stacks': 3, 'n_skips': 3}), ({'n_stacks': 3, 'n_skips': 1}), ] ) def test_forward(args): args = make_args(**args) batch_size = 4 xmax = 40 input_dim = 80 device = "cpu" xs = [np.random.randn(xlen, input_dim).astype(np.float32) for xlen in range(xmax - batch_size, xmax)] xs_pad = pad_list([np2tensor(x, device).float() for x in xs], 0.) module = importlib.import_module('neural_sp.models.seq2seq.frontends.frame_stacking') out = [module.stack_frame(x, args['n_stacks'], args['n_skips']) for x in xs] out_pad = pad_list([np2tensor(x, device).float() for x in out], 0.) assert out_pad.size(0) == xs_pad.size(0) assert out_pad.size(1) == math.ceil(xs_pad.size(1) / args['n_skips']) assert out_pad.size(2) == xs_pad.size(2) * args['n_stacks']
637
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.libs.git; import org.netbeans.libs.git.jgit.Utils; /** * When there is a merge conflict in a file from a repository, the file's status * provides instance of this class and you can get more information about the conflict. * Can be acquired with {@link GitStatus#getConflictDescriptor() } method. * * @author <NAME> */ public final class GitConflictDescriptor { private Type type; GitConflictDescriptor (Type type) { this.type = type; } public static enum Type { /** * Deleted in both branches. */ BOTH_DELETED { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_BOTH_DELETED.desc"); //NOI18N } @Override public String toString () { return "Deleted by both"; //NOI18N } }, /** * Added by us */ ADDED_BY_US { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_ADDED_BY_US.desc"); //NOI18N } @Override public String toString () { return "Added by us"; //NOI18N } }, /** * Modified but deleted in other branch */ DELETED_BY_THEM { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_DELETED_BY_THEM.desc"); //NOI18N } @Override public String toString () { return "Deleted by them"; //NOI18N } }, /** * Added by them */ ADDED_BY_THEM { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_ADDED_BY_THEM.desc"); //NOI18N } @Override public String toString () { return "Added by them"; //NOI18N } }, /** * Deleted and modified in other branch */ DELETED_BY_US { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_DELETED_BY_US.desc"); //NOI18N } @Override public String toString () { return "Deleted by us"; //NOI18N } }, /** * Added in two branches simultaneously */ BOTH_ADDED { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_BOTH_ADDED.desc"); //NOI18N } @Override public String toString () { return "Added by both"; //NOI18N } }, /** * Modified in two branches simultaneously */ BOTH_MODIFIED { @Override public String getDescription () { return Utils.getBundle(GitConflictDescriptor.class).getString("MSG_GitConflictDescriptor_BOTH_MODIFIED.desc"); //NOI18N } @Override public String toString () { return "Modified by both"; //NOI18N } }; public abstract String getDescription (); } /** * @return type of the merge conflict */ public Type getType () { return type; } }
2,107
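The javadoc above explains that each conflicted file's GitStatus exposes one of these descriptors. A small self-contained illustration, using only the enum surface visible in the class (getDescription() is avoided here because it needs the NetBeans resource bundle at runtime):

import org.netbeans.libs.git.GitConflictDescriptor;

// Illustrative only: list every conflict type with its short label.
public class ConflictTypesDemo {
    public static void main(String[] args) {
        for (GitConflictDescriptor.Type type : GitConflictDescriptor.Type.values()) {
            // toString() is overridden per constant, e.g. BOTH_MODIFIED -> "Modified by both"
            System.out.println(type.name() + " -> " + type);
        }
    }
}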
324
#include <collision_avoidance_pick_and_place/pick_and_place.h> using moveit::planning_interface::MoveItErrorCode; /* MOVING ARM TO WAIT POSITION Goal: - Use the "move_group" interface to move the robot to the "wait" target. - Observe how we verify that the move was completed Hints: - "cfg.WAIT_POSE_NAME" contains the name of the wait target. - Once the target is set you can call the "move" method in order to go to that target. */ void collision_avoidance_pick_and_place::PickAndPlace::move_to_wait_position() { //ROS_ERROR_STREAM("move_to_wait_position is not implemented yet. Aborting."); exit(1); // task variables MoveItErrorCode error; // saves the move result /* Fill Code: * Goal: * - Set robot wait target * Hints: * - Use the "setNamedTarget" method in the "move_group_ptr" object. * - Look in the "cfg.WAIT_POSE_NAME" object for the name of the target. */ move_group_ptr->setNamedTarget(cfg.WAIT_POSE_NAME); // set allowed planning time move_group_ptr->setPlanningTime(60.0f); /* Fill Code: * Goal: * - Move the robot * Hints: * - Use the "move" method in the "move_group_ptr" object and save the result * in the "error" variable */ error = move_group_ptr->move(); if(error == MoveItErrorCode::SUCCESS) { ROS_INFO_STREAM("Move " << cfg.WAIT_POSE_NAME<< " Succeeded"); } else { ROS_ERROR_STREAM("Move " << cfg.WAIT_POSE_NAME<< " Failed"); exit(1); } }
534
3,348
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.heron.uploader.hdfs; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.apache.heron.spi.common.Config; import org.apache.heron.spi.uploader.UploaderException; public class HdfsUploaderTest { private HdfsUploader uploader; private HdfsController controller; @Before public void setUp() throws Exception { Config config = Mockito.mock(Config.class); // Insert mock HdfsController uploader = Mockito.spy(new HdfsUploader()); controller = Mockito.mock(HdfsController.class); Mockito.doReturn(controller).when(uploader).getHdfsController(); uploader.initialize(config); } @After public void after() throws Exception { } @Test(expected = UploaderException.class) public void testUploadPackageLocalFileNotExist() throws Exception { Mockito.doReturn(false).when(uploader).isLocalFileExists(Mockito.anyString()); uploader.uploadPackage(); Mockito.verify(controller, Mockito.never()).copyFromLocalFile( Mockito.anyString(), Mockito.anyString()); } @Test(expected = UploaderException.class) public void testUploadPackageFailToCreateFolderOnHDFS() throws Exception { Mockito.doReturn(true).when(uploader).isLocalFileExists(Mockito.anyString()); Mockito.doReturn(false).when(controller).exists(Mockito.anyString()); Mockito.doReturn(false).when(controller).mkdirs(Mockito.anyString()); uploader.uploadPackage(); Mockito.verify(controller, Mockito.never()).copyFromLocalFile( Mockito.anyString(), Mockito.anyString()); } @Test(expected = UploaderException.class) public void testUploadPackageFailToCopyFromLocalToHDFS() throws Exception { Mockito.doReturn(true).when(uploader).isLocalFileExists(Mockito.anyString()); Mockito.doReturn(true).when(controller).mkdirs(Mockito.anyString()); Mockito.doReturn(false).when(controller).copyFromLocalFile( Mockito.anyString(), Mockito.anyString()); uploader.uploadPackage(); Mockito.verify(controller).copyFromLocalFile(Mockito.anyString(), Mockito.anyString()); } @Test public void testUploadPackage() { // Happy path Mockito.doReturn(true).when(uploader).isLocalFileExists(Mockito.anyString()); Mockito.doReturn(true).when(controller).mkdirs(Mockito.anyString()); Mockito.doReturn(true).when(controller).copyFromLocalFile( Mockito.anyString(), Mockito.anyString()); uploader.uploadPackage(); Mockito.verify(controller, Mockito.atLeastOnce()).copyFromLocalFile( Mockito.anyString(), Mockito.anyString()); } @Test public void testUndo() throws Exception { Mockito.doReturn(false).when(controller).delete(Mockito.anyString()); Assert.assertFalse(uploader.undo()); Mockito.verify(controller).delete(Mockito.anyString()); Mockito.doReturn(true).when(controller).delete(Mockito.anyString()); Assert.assertTrue(uploader.undo()); } }
1,223
449
#ifdef USE_CUDNN #include <algorithm> #include <vector> #include "caffe/filler.hpp" #include "caffe/layer.hpp" #include "caffe/util/im2col.hpp" #include "caffe/util/math_functions.hpp" #include "caffe/vision_layers.hpp" #include <boost/unordered_map.hpp> #include <cudnn.h> using boost::unordered_map; namespace caffe { // Set to three for the benefit of the backward pass, which // can use separate streams for calculating the gradient w.r.t. // bias, filter weights, and bottom data for each group independently #define CUDNN_FWD_STREAMS_PER_GROUP 1 #define CUDNN_BWD_STREAMS_PER_GROUP 2 template <typename Dtype> shared_ptr<SyncedMemory> CuDNNConvolutionLayer<Dtype>::workspaceData_fwd; template <typename Dtype> shared_ptr<SyncedMemory> CuDNNConvolutionLayer<Dtype>::workspaceData_bwd_filter; template <typename Dtype> shared_ptr<SyncedMemory> CuDNNConvolutionLayer<Dtype>::workspaceData_bwd_data; template <typename Dtype> size_t CuDNNConvolutionLayer<Dtype>::conv_layer_count = 0; /** * TODO(dox) explain cuDNN interface */ template <typename Dtype> void CuDNNConvolutionLayer<Dtype>::LayerSetUp( const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) { ConvolutionLayer<Dtype>::LayerSetUp(bottom, top); if (conv_layer_count == 0){ workspaceData_fwd = shared_ptr<SyncedMemory>(new SyncedMemory()); workspaceData_bwd_filter = shared_ptr<SyncedMemory>(new SyncedMemory()); workspaceData_bwd_data = shared_ptr<SyncedMemory>(new SyncedMemory()); } conv_layer_count++; // Initialize CUDA streams and cuDNN. int total_streams_per_group = CUDNN_FWD_STREAMS_PER_GROUP + CUDNN_BWD_STREAMS_PER_GROUP; stream_ = new cudaStream_t[this->group_ * total_streams_per_group]; handle_ = new cudnnHandle_t[this->group_ * total_streams_per_group]; // initialize size arrays workspace_fwd_offsets_ = new size_t[bottom.size()]; workspace_bwd_filter_offsets_ = new size_t[bottom.size()]; workspace_bwd_data_offsets_ = new size_t[bottom.size()]; for (size_t i = 0; i < bottom.size(); ++i) { // initialize all to default algorithms fwd_algo_.push_back((cudnnConvolutionFwdAlgo_t)0); bwd_filter_algo_.push_back((cudnnConvolutionBwdFilterAlgo_t)0); bwd_data_algo_.push_back((cudnnConvolutionBwdDataAlgo_t)0); // default algorithms don't require workspace workspace_fwd_offsets_[i] = 0; workspace_bwd_filter_offsets_[i] = 0; workspace_bwd_data_offsets_[i] = 0; } for (int g = 0; g < this->group_ * total_streams_per_group; g++) { CUDA_CHECK(cudaStreamCreate(&stream_[g])); CUDNN_CHECK(cudnnCreate(&handle_[g])); CUDNN_CHECK(cudnnSetStream(handle_[g], stream_[g])); } // Set the indexing parameters. bias_offset_ = (this->num_output_ / this->group_); std::vector<int> kernel_shape; kernel_shape.push_back(this->num_output_ / this->group_); kernel_shape.push_back(this->channels_ / this->group_); for (unsigned int i = 0; i < this->num_spatial_axes_; ++i) kernel_shape.push_back(this->kernel_shape_.cpu_data()[i]); cudnn::createNdFilterDesc<Dtype>(&filter_desc_, kernel_shape); // Create tensor descriptor(s) for data and corresponding convolution(s). for (int i = 0; i < bottom.size(); i++) { cudnnTensorDescriptor_t bottom_desc; cudnn::createTensorDesc<Dtype>(&bottom_desc); bottom_descs_.push_back(bottom_desc); cudnnTensorDescriptor_t top_desc; cudnn::createTensorDesc<Dtype>(&top_desc); top_descs_.push_back(top_desc); cudnnConvolutionDescriptor_t conv_desc; cudnn::createConvolutionDesc<Dtype>(&conv_desc); conv_descs_.push_back(conv_desc); } // Tensor descriptor for bias. 
if (this->bias_term_) { cudnn::createTensorDesc<Dtype>(&bias_desc_); } handles_setup_ = true; need_benchmark_ = true; } template <typename Dtype> void CuDNNConvolutionLayer<Dtype>::Reshape( const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) { ConvolutionLayer<Dtype>::Reshape(bottom, top); bottom_offset_ = this->bottom_dim_ / this->group_; top_offset_ = this->top_dim_ / this->group_; std::vector<int> bottom_tensor_shape(bottom[0]->shape()); bottom_tensor_shape[1] /= this->group_; std::vector<int> bottom_tensor_stride(bottom[0]->shape().size(), 1); for (int i = bottom[0]->shape().size() - 2; i >= 0; --i) { bottom_tensor_stride[i] = bottom[0]->shape(i + 1) * bottom_tensor_stride[i + 1]; } std::vector<int> top_tensor_shape(top[0]->shape()); top_tensor_shape[1] /= this->group_; std::vector<int> top_tensor_stride(top[0]->shape().size(), 1); for (int i = top[0]->shape().size() - 2; i >= 0; --i) { top_tensor_stride[i] = top[0]->shape(i + 1) * top_tensor_stride[i + 1]; } std::vector<int> pad, stride; for (unsigned int i = 0; i < this->num_spatial_axes_; ++i) { pad.push_back(this->pad_.cpu_data()[i]); stride.push_back(this->stride_.cpu_data()[i]); } // Specify workspace limit for kernels directly until we have a // planning strategy and a rewrite of Caffe's GPU memory mangagement. // // However this can be tuned by the "richness" parameter in the solver protobuf // By setting richness, you can increase the memory available to cuDNN and thus // let it choose fast but space consuming algorithms. for (int i = 0; i < bottom.size(); i++) { cudnn::setTensorNdDesc<Dtype>(&bottom_descs_[i], bottom_tensor_shape, bottom_tensor_stride); cudnn::setTensorNdDesc<Dtype>(&top_descs_[i], top_tensor_shape, top_tensor_stride); cudnn::setNdConvolutionDesc<Dtype>(&conv_descs_[i], bottom_descs_[i], filter_desc_, pad, stride); if (need_benchmark_){ // choose forward and backward algorithms + workspace(s) const int kRequestedForwardAlgoCount = 6; vector<cudnnConvolutionFwdAlgoPerf_t> fwd_perf; fwd_perf.resize(kRequestedForwardAlgoCount); int returnedAlgoCount; size_t mem_limit = 200*1024*1024; CUDNN_CHECK(cudnnGetConvolutionForwardAlgorithm (handle_[0], bottom_descs_[i], filter_desc_, conv_descs_[i], top_descs_[i],CUDNN_CONVOLUTION_FWD_SPECIFY_WORKSPACE_LIMIT,mem_limit, &fwd_algo_[i])); /* CUDNN_CHECK(cudnnGetConvolutionForwardWorkspaceSize(handle_[0], bottom_descs_[i], filter_desc_, conv_descs_[i], top_descs_[i], kRequestedForwardAlgoCount, &returnedAlgoCount, &fwd_perf[0])); // choose the fastest within limit // if all algorithms exceed memory limit, we will use the 0 algorithm with no workspace for (int a = 0; a < kRequestedForwardAlgoCount; ++a){ if (fwd_perf[a].memory * this->group_ < (Caffe::cudnn_mem_richness() * 1024 * 1024) || Caffe::cudnn_mem_richness() == 0){ fwd_algo_[i] = fwd_perf[a].algo; break; } } */ // choose backward algorithm for filter const int kRequestedBackwardFilterAlgoCount = 4; vector<cudnnConvolutionBwdFilterAlgoPerf_t> bwd_filter_perf; bwd_filter_perf.resize(kRequestedBackwardFilterAlgoCount); CUDNN_CHECK(cudnnGetConvolutionBackwardFilterAlgorithm(handle_[0], bottom_descs_[i], top_descs_[i], conv_descs_[i], filter_desc_, CUDNN_CONVOLUTION_BWD_FILTER_SPECIFY_WORKSPACE_LIMIT, mem_limit, &bwd_filter_algo_[i])); /* CUDNN_CHECK(cudnnFindConvolutionBackwardFilterAlgorithm(handle_[0], bottom_descs_[i], top_descs_[i], conv_descs_[i], filter_desc_, kRequestedBackwardFilterAlgoCount, &returnedAlgoCount, &bwd_filter_perf[0])); // choose the fastest within limit // if all algorithms exceed 
memory limit, we will use the 0 algorithm with no workspace for (int a = 0; a < kRequestedBackwardFilterAlgoCount; ++a){ if (bwd_filter_perf[a].memory * this->group_ < (Caffe::cudnn_mem_richness() * 1024 * 1024) || Caffe::cudnn_mem_richness() == 0){ bwd_filter_algo_[i] = bwd_filter_perf[a].algo; break; } } */ // choose backward algo for data const int kRequestedBackwardDataAlgoCount = 4; vector<cudnnConvolutionBwdDataAlgoPerf_t> bwd_data_perf; bwd_data_perf.resize(kRequestedBackwardDataAlgoCount); //backward data CUDNN_CHECK(cudnnGetConvolutionBackwardDataAlgorithm(handle_[0], filter_desc_, top_descs_[i], conv_descs_[i], bottom_descs_[i], CUDNN_CONVOLUTION_BWD_DATA_SPECIFY_WORKSPACE_LIMIT, mem_limit, &bwd_data_algo_[i])); /* CUDNN_CHECK(cudnnFindConvolutionBackwardDataAlgorithm(handle_[0], filter_desc_, top_descs_[i], conv_descs_[i], bottom_descs_[i], kRequestedBackwardDataAlgoCount, &returnedAlgoCount, &bwd_data_perf[0])); // choose the fastest within limit // if all algorithms exceed memory limit, we will use the 0 algorithm with no workspace for (int a = 0; a < kRequestedBackwardDataAlgoCount; ++a){ if (bwd_data_perf[a].memory * this->group_ <(Caffe::cudnn_mem_richness() * 1024 * 1024) || Caffe::cudnn_mem_richness() == 0){ bwd_data_algo_[i] = bwd_data_perf[a].algo; break; } } */ need_benchmark_ = false; } } // Tensor descriptor for bias. if (this->bias_term_) { vector<int> bias_shape(bottom[0]->shape().size(), 1); bias_shape[1] = this->num_output_ / this->group_; cudnn::setTensorNdDesc<Dtype>(&bias_desc_, bias_shape); } AdjustWorkSpaces(); } template<typename Dtype> void CuDNNConvolutionLayer<Dtype>::AdjustWorkSpaces() { size_t workspace_size_fwd = 0; size_t workspace_size_bwd_data = 0; size_t workspace_size_bwd_filter = 0; for (int i = 0; i < fwd_algo_.size(); ++i){ size_t workspace_size; cudnnGetConvolutionForwardWorkspaceSize(handle_[0], bottom_descs_[i], filter_desc_, conv_descs_[i], top_descs_[i], fwd_algo_[i], &workspace_size); workspace_fwd_offsets_[i] = workspace_size; workspace_size_fwd = std::max(workspace_size * this->group_, workspace_size_fwd); cudnnGetConvolutionBackwardFilterWorkspaceSize(handle_[1], bottom_descs_[i], top_descs_[i], conv_descs_[i], filter_desc_, bwd_filter_algo_[i], &workspace_size); workspace_bwd_filter_offsets_[i] = workspace_size; workspace_size_bwd_filter = std::max(workspace_size * this->group_, workspace_size_bwd_filter); cudnnGetConvolutionBackwardDataWorkspaceSize(handle_[2], filter_desc_, top_descs_[i], conv_descs_[i], bottom_descs_[i], bwd_data_algo_[i], &workspace_size); workspace_bwd_data_offsets_[i] = workspace_size; workspace_size_bwd_data = std::max(workspace_size * this->group_, workspace_size_bwd_data); } workspaceData_fwd->Resize(workspace_size_fwd); workspaceData_bwd_filter->Resize(workspace_size_bwd_filter); workspaceData_bwd_data->Resize(workspace_size_bwd_data); } template <typename Dtype> CuDNNConvolutionLayer<Dtype>::~CuDNNConvolutionLayer() { // Check that handles have been setup before destroying. 
if (!handles_setup_) { return; } for (int i = 0; i < bottom_descs_.size(); i++) { cudnnDestroyTensorDescriptor(bottom_descs_[i]); cudnnDestroyTensorDescriptor(top_descs_[i]); cudnnDestroyConvolutionDescriptor(conv_descs_[i]); } if (this->bias_term_) { cudnnDestroyTensorDescriptor(bias_desc_); } cudnnDestroyFilterDescriptor(filter_desc_); int total_stream_per_group = CUDNN_FWD_STREAMS_PER_GROUP + CUDNN_BWD_STREAMS_PER_GROUP; for (int g = 0; g < this->group_ * total_stream_per_group; g++) { cudaStreamDestroy(stream_[g]); cudnnDestroy(handle_[g]); } --conv_layer_count; if (conv_layer_count == 0){ workspaceData_fwd.reset(); workspaceData_bwd_filter.reset(); workspaceData_bwd_data.reset(); } delete [] stream_; delete [] handle_; delete [] workspace_fwd_offsets_; delete [] workspace_bwd_data_offsets_; delete [] workspace_bwd_filter_offsets_; } INSTANTIATE_CLASS(CuDNNConvolutionLayer); } // namespace caffe #endif
7,188
22,688
<filename>cyber/parameter/parameter_client.cc /****************************************************************************** * Copyright 2018 The Apollo Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ #include "cyber/parameter/parameter_client.h" #include "cyber/node/node.h" #include "cyber/parameter/parameter_service_names.h" namespace apollo { namespace cyber { ParameterClient::ParameterClient(const std::shared_ptr<Node>& node, const std::string& service_node_name) : node_(node) { get_parameter_client_ = node_->CreateClient<ParamName, Param>( FixParameterServiceName(service_node_name, GET_PARAMETER_SERVICE_NAME)); set_parameter_client_ = node_->CreateClient<Param, BoolResult>( FixParameterServiceName(service_node_name, SET_PARAMETER_SERVICE_NAME)); list_parameters_client_ = node_->CreateClient<NodeName, Params>( FixParameterServiceName(service_node_name, LIST_PARAMETERS_SERVICE_NAME)); } bool ParameterClient::GetParameter(const std::string& param_name, Parameter* parameter) { auto request = std::make_shared<ParamName>(); request->set_value(param_name); auto response = get_parameter_client_->SendRequest(request); if (response == nullptr) { AERROR << "Call " << get_parameter_client_->ServiceName() << " failed"; return false; } if (response->type() == ParamType::NOT_SET) { AWARN << "Parameter " << param_name << " not exists yet."; return false; } parameter->FromProtoParam(*response); return true; } bool ParameterClient::SetParameter(const Parameter& parameter) { auto request = std::make_shared<Param>(parameter.ToProtoParam()); auto response = set_parameter_client_->SendRequest(request); if (response == nullptr) { AERROR << "Call " << set_parameter_client_->ServiceName() << " failed"; return false; } return response->value(); } bool ParameterClient::ListParameters(std::vector<Parameter>* parameters) { auto request = std::make_shared<NodeName>(); request->set_value(node_->Name()); auto response = list_parameters_client_->SendRequest(request); if (response == nullptr) { AERROR << "Call " << list_parameters_client_->ServiceName() << " failed"; return false; } for (auto& param : response->param()) { Parameter parameter; parameter.FromProtoParam(param); parameters->emplace_back(parameter); } return true; } } // namespace cyber } // namespace apollo
981
4,816
/** * @file include/retdec/fileformat/utils/file_io.h * @brief Functions for file I/O. * @copyright (c) 2017 Avast Software, licensed under the MIT license */ #ifndef RETDEC_FILEFORMAT_UTILS_FILE_IO_H #define RETDEC_FILEFORMAT_UTILS_FILE_IO_H #include <sstream> #include <string> #include <vector> namespace retdec { namespace fileformat { bool readHexString(std::istream &fileStream, std::string &hexa, std::size_t start = 0, std::size_t desiredSize = 0); bool readPlainString(std::istream &fileStream, std::string &plain, std::size_t start = 0, std::size_t desiredSize = 0); } // namespace fileformat } // namespace retdec #endif
224
354
class WhoisException(Exception): pass
11
3,095
<reponame>liyuzhao/QWidgetDemo /***************************************************************************** * vlc_atomic.h: ***************************************************************************** * Copyright (C) 2010 <NAME> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA. *****************************************************************************/ #ifndef VLC_ATOMIC_H # define VLC_ATOMIC_H /** * \file * Atomic operations do not require locking, but they are not very powerful. */ /* Clang older versions support atomics but lacks the stdatomic.h header */ #if defined(__clang__) # if !defined(__has_include) || !__has_include(<stdatomic.h>) # define __STDC_NO_ATOMICS__ 1 # endif #endif # ifndef __cplusplus # if !defined (__STDC_NO_ATOMICS__) /*** Native C11 atomics ***/ # include <stdatomic.h> # else /*** Intel/GCC atomics ***/ # define ATOMIC_FLAG_INIT false # define ATOMIC_VAR_INIT(value) (value) # define atomic_init(obj, value) \ do { *(obj) = (value); } while(0) # define kill_dependency(y) \ ((void)0) # define atomic_thread_fence(order) \ __sync_synchronize() # define atomic_signal_fence(order) \ ((void)0) # define atomic_is_lock_free(obj) \ false typedef bool atomic_flag; typedef bool atomic_bool; typedef char atomic_char; typedef signed char atomic_schar; typedef unsigned char atomic_uchar; typedef short atomic_short; typedef unsigned short atomic_ushort; typedef int atomic_int; typedef unsigned int atomic_uint; typedef long atomic_long; typedef unsigned long atomic_ulong; typedef long long atomic_llong; typedef unsigned long long atomic_ullong; //typedef char16_t atomic_char16_t; //typedef char32_t atomic_char32_t; typedef wchar_t atomic_wchar_t; typedef int_least8_t atomic_int_least8_t; typedef uint_least8_t atomic_uint_least8_t; typedef int_least16_t atomic_int_least16_t; typedef uint_least16_t atomic_uint_least16_t; typedef int_least32_t atomic_int_least32_t; typedef uint_least32_t atomic_uint_least32_t; typedef int_least64_t atomic_int_least64_t; typedef uint_least64_t atomic_uint_least64_t; typedef int_fast8_t atomic_int_fast8_t; typedef uint_fast8_t atomic_uint_fast8_t; typedef int_fast16_t atomic_int_fast16_t; typedef uint_fast16_t atomic_uint_fast16_t; typedef int_fast32_t atomic_int_fast32_t; typedef uint_fast32_t atomic_uint_fast32_t; typedef int_fast64_t atomic_int_fast64_t; typedef uint_fast64_t atomic_uint_fast64_t; typedef intptr_t atomic_intptr_t; typedef uintptr_t atomic_uintptr_t; typedef size_t atomic_size_t; typedef ptrdiff_t atomic_ptrdiff_t; typedef intmax_t atomic_intmax_t; typedef uintmax_t atomic_uintmax_t; # define atomic_store(object,desired) \ do { \ *(object) = (desired); \ __sync_synchronize(); \ } while (0) # define atomic_store_explicit(object,desired,order) \ atomic_store(object,desired) # define atomic_load(object) \ (__sync_synchronize(), *(object)) # define 
atomic_load_explicit(object,order) \ atomic_load(object) # define atomic_exchange(object,desired) \ ({ \ typeof (object) _obj = (object); \ typeof (*object) _old; \ do \ _old = atomic_load(_obj); \ while (!__sync_bool_compare_and_swap(_obj, _old, (desired))); \ _old; \ }) # define atomic_exchange_explicit(object,desired,order) \ atomic_exchange(object,desired) # define atomic_compare_exchange(object,expected,desired) \ ({ \ typeof (object) _exp = (expected); \ typeof (*object) _old = *_exp; \ *_exp = __sync_val_compare_and_swap((object), _old, (desired)); \ *_exp == _old; \ }) # define atomic_compare_exchange_strong(object,expected,desired) \ atomic_compare_exchange(object, expected, desired) # define atomic_compare_exchange_strong_explicit(object,expected,desired,order,order_different) \ atomic_compare_exchange_strong(object, expected, desired) # define atomic_compare_exchange_weak(object,expected,desired) \ atomic_compare_exchange(object, expected, desired) # define atomic_compare_exchange_weak_explicit(object,expected,desired,order_equal,order_different) \ atomic_compare_exchange_weak(object, expected, desired) # define atomic_fetch_add(object,operand) \ __sync_fetch_and_add(object, operand) # define atomic_fetch_add_explicit(object,operand,order) \ atomic_fetch_add(object,operand) # define atomic_fetch_sub(object,operand) \ __sync_fetch_and_sub(object, operand) # define atomic_fetch_sub_explicit(object,operand,order) \ atomic_fetch_sub(object,operand) # define atomic_fetch_or(object,operand) \ __sync_fetch_and_or(object, operand) # define atomic_fetch_or_explicit(object,operand,order) \ atomic_fetch_or(object,operand) # define atomic_fetch_xor(object,operand) \ __sync_fetch_and_sub(object, operand) # define atomic_fetch_xor_explicit(object,operand,order) \ atomic_fetch_sub(object,operand) # define atomic_fetch_and(object,operand) \ __sync_fetch_and_and(object, operand) # define atomic_fetch_and_explicit(object,operand,order) \ atomic_fetch_and(object,operand) # define atomic_flag_test_and_set(object) \ atomic_exchange(object, true) # define atomic_flag_test_and_set_explicit(object,order) \ atomic_flag_test_and_set(object) # define atomic_flag_clear(object) \ atomic_store(object, false) # define atomic_flag_clear_explicit(object,order) \ atomic_flag_clear(object) # endif /* !C11 */ typedef atomic_uint_least32_t vlc_atomic_float; static inline void vlc_atomic_init_float(vlc_atomic_float *var, float f) { union { float f; uint32_t i; } u; u.f = f; atomic_init(var, u.i); } /** Helper to retrieve a single precision from an atom. */ static inline float vlc_atomic_load_float(vlc_atomic_float *atom) { union { float f; uint32_t i; } u; u.i = atomic_load(atom); return u.f; } /** Helper to store a single precision into an atom. */ static inline void vlc_atomic_store_float(vlc_atomic_float *atom, float f) { union { float f; uint32_t i; } u; u.f = f; atomic_store(atom, u.i); } # else /* C++ */ /*** Native C++11 atomics ***/ # include <atomic> # endif /* C++ */ #endif
2,861
36,552
<gh_stars>1000+ /* * * Copyright 2019 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include "src/core/lib/iomgr/executor/threadpool.h" #include "test/core/util/test_config.h" static const int kSmallThreadPoolSize = 20; static const int kLargeThreadPoolSize = 100; static const int kThreadSmallIter = 100; static const int kThreadLargeIter = 10000; static void test_size_zero(void) { gpr_log(GPR_INFO, "test_size_zero"); grpc_core::ThreadPool* pool_size_zero = new grpc_core::ThreadPool(0); GPR_ASSERT(pool_size_zero->pool_capacity() == 1); delete pool_size_zero; } static void test_constructor_option(void) { gpr_log(GPR_INFO, "test_constructor_option"); // Tests options grpc_core::Thread::Options options; options.set_stack_size(192 * 1024); // Random non-default value grpc_core::ThreadPool* pool = new grpc_core::ThreadPool(0, "test_constructor_option", options); GPR_ASSERT(pool->thread_options().stack_size() == options.stack_size()); delete pool; } // Simple functor for testing. It will count how many times being called. class SimpleFunctorForAdd : public grpc_completion_queue_functor { public: friend class SimpleFunctorCheckForAdd; SimpleFunctorForAdd() { functor_run = &SimpleFunctorForAdd::Run; inlineable = true; internal_next = this; internal_success = 0; } ~SimpleFunctorForAdd() {} static void Run(struct grpc_completion_queue_functor* cb, int /*ok*/) { auto* callback = static_cast<SimpleFunctorForAdd*>(cb); callback->count_.fetch_add(1, std::memory_order_relaxed); } int count() { return count_.load(std::memory_order_relaxed); } private: std::atomic<int> count_{0}; }; static void test_add(void) { gpr_log(GPR_INFO, "test_add"); grpc_core::ThreadPool* pool = new grpc_core::ThreadPool(kSmallThreadPoolSize, "test_add"); SimpleFunctorForAdd* functor = new SimpleFunctorForAdd(); for (int i = 0; i < kThreadSmallIter; ++i) { pool->Add(functor); } delete pool; GPR_ASSERT(functor->count() == kThreadSmallIter); delete functor; gpr_log(GPR_DEBUG, "Done."); } // Thread that adds closures to pool class WorkThread { public: WorkThread(grpc_core::ThreadPool* pool, SimpleFunctorForAdd* cb, int num_add) : num_add_(num_add), cb_(cb), pool_(pool) { thd_ = grpc_core::Thread( "thread_pool_test_add_thd", [](void* th) { static_cast<WorkThread*>(th)->Run(); }, this); } ~WorkThread() {} void Start() { thd_.Start(); } void Join() { thd_.Join(); } private: void Run() { for (int i = 0; i < num_add_; ++i) { pool_->Add(cb_); } } int num_add_; SimpleFunctorForAdd* cb_; grpc_core::ThreadPool* pool_; grpc_core::Thread thd_; }; static void test_multi_add(void) { gpr_log(GPR_INFO, "test_multi_add"); const int num_work_thds = 10; grpc_core::ThreadPool* pool = new grpc_core::ThreadPool(kLargeThreadPoolSize, "test_multi_add"); SimpleFunctorForAdd* functor = new SimpleFunctorForAdd(); WorkThread** work_thds = static_cast<WorkThread**>( gpr_zalloc(sizeof(WorkThread*) * num_work_thds)); gpr_log(GPR_DEBUG, "Fork threads for adding..."); for (int i = 0; i < num_work_thds; ++i) { work_thds[i] = new WorkThread(pool, functor, 
kThreadLargeIter); work_thds[i]->Start(); } // Wait for all threads finish gpr_log(GPR_DEBUG, "Waiting for all work threads finish..."); for (int i = 0; i < num_work_thds; ++i) { work_thds[i]->Join(); delete work_thds[i]; } gpr_free(work_thds); gpr_log(GPR_DEBUG, "Done."); gpr_log(GPR_DEBUG, "Waiting for all closures finish..."); // Destructor of thread pool will wait for all closures to finish delete pool; GPR_ASSERT(functor->count() == kThreadLargeIter * num_work_thds); delete functor; gpr_log(GPR_DEBUG, "Done."); } // Checks the current count with a given number. class SimpleFunctorCheckForAdd : public grpc_completion_queue_functor { public: SimpleFunctorCheckForAdd(int ok, int* count) : count_(count) { functor_run = &SimpleFunctorCheckForAdd::Run; inlineable = true; internal_success = ok; } ~SimpleFunctorCheckForAdd() {} static void Run(struct grpc_completion_queue_functor* cb, int /*ok*/) { auto* callback = static_cast<SimpleFunctorCheckForAdd*>(cb); (*callback->count_)++; GPR_ASSERT(*callback->count_ == callback->internal_success); } private: int* count_; }; static void test_one_thread_FIFO(void) { gpr_log(GPR_INFO, "test_one_thread_FIFO"); int counter = 0; grpc_core::ThreadPool* pool = new grpc_core::ThreadPool(1, "test_one_thread_FIFO"); SimpleFunctorCheckForAdd** check_functors = static_cast<SimpleFunctorCheckForAdd**>( gpr_zalloc(sizeof(SimpleFunctorCheckForAdd*) * kThreadSmallIter)); for (int i = 0; i < kThreadSmallIter; ++i) { check_functors[i] = new SimpleFunctorCheckForAdd(i + 1, &counter); pool->Add(check_functors[i]); } // Destructor of pool will wait until all closures finished. delete pool; for (int i = 0; i < kThreadSmallIter; ++i) { delete check_functors[i]; } gpr_free(check_functors); gpr_log(GPR_DEBUG, "Done."); } int main(int argc, char** argv) { grpc::testing::TestEnvironment env(argc, argv); grpc_init(); test_size_zero(); test_constructor_option(); test_add(); test_multi_add(); test_one_thread_FIFO(); grpc_shutdown(); return 0; }
2,197
407
<reponame>iuskye/SREWorks<filename>saas/aiops/api/aisp/tdata-aisp-server/src/main/java/com/alibaba/tdata/aisp/server/common/schedule/TaskResultCleanSchedule.java package com.alibaba.tdata.aisp.server.common.schedule; import java.util.Date; import com.alibaba.tdata.aisp.server.common.properties.TaskRemainProperties; import com.alibaba.tdata.aisp.server.repository.AnalyseTaskRepository; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.time.DateUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; /** * @ClassName: TaskResultCleanSchedule * @Author: dyj * @DATE: 2021-12-07 * @Description: **/ @Component @Slf4j public class TaskResultCleanSchedule { @Autowired private AnalyseTaskRepository taskRepository; @Autowired private TaskRemainProperties taskRemainProperties; @Scheduled(initialDelay = 10000L, fixedRate = 86400000L) public void cleanResult(){ Date now = new Date(); Date date = DateUtils.addDays(now, -taskRemainProperties.getDays()); taskRepository.cleanResult(date); } }
439
18,396
<filename>trunk/3rdparty/ffmpeg-4-fit/libavutil/lfg.c /* * Lagged Fibonacci PRNG * Copyright (c) 2008 <NAME> * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include <inttypes.h> #include <limits.h> #include <math.h> #include "lfg.h" #include "crc.h" #include "md5.h" #include "error.h" #include "intreadwrite.h" #include "attributes.h" av_cold void av_lfg_init(AVLFG *c, unsigned int seed) { uint8_t tmp[16] = { 0 }; int i; for (i = 8; i < 64; i += 4) { AV_WL32(tmp, seed); tmp[4] = i; av_md5_sum(tmp, tmp, 16); c->state[i ] = AV_RL32(tmp); c->state[i + 1] = AV_RL32(tmp + 4); c->state[i + 2] = AV_RL32(tmp + 8); c->state[i + 3] = AV_RL32(tmp + 12); } c->index = 0; } void av_bmg_get(AVLFG *lfg, double out[2]) { double x1, x2, w; do { x1 = 2.0 / UINT_MAX * av_lfg_get(lfg) - 1.0; x2 = 2.0 / UINT_MAX * av_lfg_get(lfg) - 1.0; w = x1 * x1 + x2 * x2; } while (w >= 1.0); w = sqrt((-2.0 * log(w)) / w); out[0] = x1 * w; out[1] = x2 * w; } int av_lfg_init_from_data(AVLFG *c, const uint8_t *data, unsigned int length) { unsigned int beg, end, segm; const AVCRC *avcrc; uint32_t crc = 1; /* avoid integer overflow in the loop below. */ if (length > (UINT_MAX / 128U)) return AVERROR(EINVAL); c->index = 0; avcrc = av_crc_get_table(AV_CRC_32_IEEE); /* This can't fail. It's a well-defined table in crc.c */ /* across 64 segments of the incoming data, * do a running crc of each segment and store the crc as the state for that slot. * this works even if the length of the segment is 0 bytes. */ beg = 0; for (segm = 0;segm < 64;segm++) { end = (((segm + 1) * length) / 64); crc = av_crc(avcrc, crc, data + beg, end - beg); c->state[segm] = (unsigned int)crc; beg = end; } return 0; }
1,124
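av_bmg_get() in the sample above is the Marsaglia polar form of the Box-Muller transform: x_1 and x_2 are drawn uniformly from (-1, 1), accepted only when w = x_1^2 + x_2^2 < 1, and the two outputs are

y_1 = x_1 \sqrt{\frac{-2\ln w}{w}}, \qquad y_2 = x_2 \sqrt{\frac{-2\ln w}{w}}

which are two independent standard normal deviates; this matches the rejection loop and the final scaling by sqrt((-2.0 * log(w)) / w) in the code.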
622
<reponame>Sam-Gao-Xin/Courses- package model.members; public class Visit { private int year; private int month; private int day; public Visit(int day, int month, int year){ this.day = day; this.month = month; this.year = year; } public String getDate(){ String dayString = prettyFormat(day); String monthString = prettyFormat(month); return dayString+"-"+monthString+"-"+year; } private String prettyFormat(int i){ if(i < 10){ return "" + "0" + i; } else { return "" + i; } } }
314
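Based directly on the Visit class above, prettyFormat() zero-pads single-digit day and month values, so getDate() behaves as follows (a tiny demo class, assuming model.members.Visit is on the classpath):

import model.members.Visit;

public class VisitDemo {
    public static void main(String[] args) {
        Visit visit = new Visit(5, 3, 2021);  // day = 5, month = 3, year = 2021
        System.out.println(visit.getDate());  // prints "05-03-2021"
    }
}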
1,585
/* * Copyright (c) 2018 DataDirect Networks. All rights reserved. * $COPYRIGHT$ * * Additional copyrights may follow * * $HEADER$ */ #include "ompi_config.h" #include "fbtl_ime.h" #include "mpi.h" #include "ompi/constants.h" #include "ompi/mca/fbtl/fbtl.h" static ssize_t mca_fbtl_ime_nonblocking_op (ompio_file_t *fh, ompi_request_t *request, int io_op); ssize_t mca_fbtl_ime_ipreadv (ompio_file_t *fh, ompi_request_t *request) { return mca_fbtl_ime_nonblocking_op(fh, request, FBTL_IME_READ); } ssize_t mca_fbtl_ime_ipwritev (ompio_file_t *fh, ompi_request_t *request) { return mca_fbtl_ime_nonblocking_op(fh, request, FBTL_IME_WRITE); } static ssize_t mca_fbtl_ime_nonblocking_op (ompio_file_t *fh, ompi_request_t *request, int io_op) { mca_fbtl_ime_request_data_t *data; mca_ompio_request_t *req = (mca_ompio_request_t *) request; int i=0, req_index = 0, ret; data = (mca_fbtl_ime_request_data_t *) malloc ( sizeof (mca_fbtl_ime_request_data_t)); if ( NULL == data ) { opal_output (1,"could not allocate memory\n"); return OMPI_ERR_OUT_OF_RESOURCE; } /* We might allocate too much memory here because we don't know how many IME requests will be necessary. We will use all the iovec "slots" in the array, but maybe not all the request and request status slots. That is, because an IME request can handle several iovecs, not just one. */ data->allocated_data = (void*) malloc( fh->f_num_of_io_entries * (sizeof(struct iovec) + sizeof(struct ime_aiocb) + sizeof(ssize_t)) ); if (NULL == data->allocated_data) { opal_output(1, "OUT OF MEMORY\n"); free(data); return OMPI_ERR_OUT_OF_RESOURCE; } data->aio_iovecs = (struct iovec *) data->allocated_data; data->aio_reqs = (struct ime_aiocb *) (data->aio_iovecs + fh->f_num_of_io_entries); data->aio_req_status = (ssize_t *) (data->aio_reqs + fh->f_num_of_io_entries); /* Fill some attributes of the OMPIO request data */ data->aio_req_type = io_op; /* The correctness of io_op will be checked later */ data->aio_req_chunks = mca_fbtl_ime_aio_reqs_max; data->aio_req_fail_count = 0; data->aio_total_len = 0; data->aio_fh = fh; data->aio_reqs[0].iovcnt = 0; /* Go through all IO entries and try to aggregate them. */ for ( i=0; i<fh->f_num_of_io_entries; i++ ) { data->aio_iovecs[i].iov_base = fh->f_io_array[i].memory_address; data->aio_iovecs[i].iov_len = fh->f_io_array[i].length; /* If the processed iovec will be the first in our ime_aiocb request, then we initialize this aio request for IME. */ if (data->aio_reqs[req_index].iovcnt == 0) { data->aio_reqs[req_index].iov = &data->aio_iovecs[i]; data->aio_reqs[req_index].iovcnt = 1; data->aio_reqs[req_index].file_offset = (off_t) fh->f_io_array[i].offset; data->aio_reqs[req_index].fd = fh->fd; data->aio_reqs[req_index].complete_cb = &mca_fbtl_ime_complete_cb; data->aio_reqs[req_index].user_context = (intptr_t) &data->aio_req_status[req_index]; data->aio_req_status[req_index] = FBTL_IME_IN_PROGRESS; } /* Here we check if the next iovec will be appended to the current ime_aiocb request. ie: if data is contiguous AND we don't exceed the advised number of iovecs for IME In that case, the next iovec will be appended to the IME req. 
*/ if (i+1 != fh->f_num_of_io_entries && ((OMPI_MPI_OFFSET_TYPE)(intptr_t)fh->f_io_array[i].offset + (ptrdiff_t)fh->f_io_array[i].length) == (OMPI_MPI_OFFSET_TYPE)(intptr_t)fh->f_io_array[i+1].offset && data->aio_reqs[req_index].iovcnt < mca_fbtl_ime_iov_max ) { data->aio_reqs[req_index].iovcnt++; } /* Otherwise, we need to create a new request (except if there is no next iovec to process) */ else if ( i+1 != fh->f_num_of_io_entries ) { req_index++; data->aio_reqs[req_index].iovcnt = 0; } } /* Fill the missing attributes of the OMPI request */ data->aio_req_count = req_index + 1; data->aio_open_reqs = req_index + 1; data->aio_first_active_req = 0; if ( data->aio_req_count > data->aio_req_chunks ) { data->aio_last_active_req = data->aio_req_chunks; } else { data->aio_last_active_req = data->aio_req_count; } /* Actually start the requests (or at least the first batch). In case an error happened when one request is started, we don't send the next ones and mark the failing request as the last active one. Finally we exit as if no error happened, because some other requests might have already been started and they need to be finalized properly (via the progress function). */ for (i=0; i < data->aio_last_active_req; i++) { switch(io_op) { case FBTL_IME_READ: ret = ime_native_aio_read(&data->aio_reqs[i]); if (ret < 0) { opal_output(1, "mca_fbtl_ime_nonblocking_op: error in " "ime_native_aio_read() error ret=%d %s", ret, strerror(errno)); data->aio_req_status[i] = FBTL_IME_REQ_ERROR; data->aio_last_active_req = i + 1; goto standard_exit; } break; case FBTL_IME_WRITE: ret = ime_native_aio_write(&data->aio_reqs[i]); if (ret < 0) { opal_output(1, "mca_fbtl_ime_nonblocking_op: error in " "ime_native_aio_write() error ret=%d %s", ret, strerror(errno)); data->aio_req_status[i] = FBTL_IME_REQ_ERROR; data->aio_last_active_req = i + 1; goto standard_exit; } break; default: opal_output(1, "mca_fbtl_ime_nonblocking_op: an unsupported " "IO operation was requested. io_op=%d", io_op); goto error_exit; } } standard_exit: req->req_data = data; req->req_progress_fn = mca_fbtl_ime_progress; req->req_free_fn = mca_fbtl_ime_request_free; return OMPI_SUCCESS; error_exit: free(data->allocated_data); free(data); return OMPI_ERROR; }
3,351
335
<filename>S/Similarity_noun.json { "word": "Similarity", "definitions": [ "The state or fact of being similar.", "A similar feature or aspect." ], "parts-of-speech": "Noun" }
88
2,288
<gh_stars>1000+ /* MIT (BSD) license - see LICENSE file for details */ #ifndef CCAN_UTF8_H #define CCAN_UTF8_H #include <inttypes.h> #include <stdbool.h> #include <string.h> /* Unicode is limited to 21 bits. */ #define UTF8_MAX_LEN 4 struct utf8_state { /* How many characters we are expecting as part of this Unicode point */ uint16_t total_len; /* How many characters we've already seen. */ uint16_t used_len; /* Compound character, aka Unicode point. */ uint32_t c; }; #define UTF8_STATE_INIT { 0, 0, 0 } static inline void utf8_state_init(struct utf8_state *utf8_state) { memset(utf8_state, 0, sizeof(*utf8_state)); } /** * utf8_decode - continue UTF8 decoding with this character. * @utf8_state - initialized UTF8 state. * @c - the character. * * Returns false if it needs another character to give results. * Otherwise returns true, @utf8_state can be reused without initializeation, * and sets errno: * 0: success * EINVAL: bad encoding (including a NUL character). * EFBIG: not a minimal encoding. * ERANGE: encoding of invalid character. * * You can extract the character from @utf8_state->c; @utf8_state->used_len * indicates how many characters have been consumed. */ bool utf8_decode(struct utf8_state *utf8_state, char c); /** * utf8_encode - encode a point into UTF8. * @point - Unicode point to include. * @dest - buffer to fill. * * Returns 0 if point was invalid, otherwise bytes of dest used. * Sets errno to ERANGE if point was invalid. */ size_t utf8_encode(uint32_t point, char dest[UTF8_MAX_LEN]); #endif /* CCAN_UTF8_H */
535
352
#include <stdint.h> #include "config.h" #define BM25_DEFAULT_B 0.75 #define BM25_DEFAULT_K1 1.5 struct BM25_term_i_args { uint32_t n_postings; /* only for printing */ float idf[MAX_MERGE_POSTINGS]; /* frac_b_avgDocLen is used for actual scoring for efficiency reason, * avgDocLen is just for complete arguments printing. */ float avgDocLen, frac_b_avgDocLen; float b, k1; }; void BM25_term_i_args_print(struct BM25_term_i_args*); float BM25_term_i_score(struct BM25_term_i_args*, uint32_t, float, float); float BM25_idf(float, float);
218
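The header above only declares the scoring interface. Assuming the conventional Okapi BM25 formulation that its fields (idf, b, k1, avgDocLen) correspond to (the header itself does not show the scoring body), the per-term score of a document d with in-document term frequency tf is

\mathrm{score}(t, d) = \mathrm{idf}(t)\cdot\frac{tf\,(k_1 + 1)}{tf + k_1\left(1 - b + b\,\frac{|d|}{\mathrm{avgDocLen}}\right)}

with the defaults b = 0.75 and k1 = 1.5 defined in the header; frac_b_avgDocLen presumably caches b / avgDocLen so the denominator needs only one extra multiply per posting.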
9,778
<filename>src/backend/access/hash/hashovfl.c<gh_stars>1000+ /*------------------------------------------------------------------------- * * hashovfl.c * Overflow page management code for the Postgres hash access method * * Portions Copyright (c) 1996-2021, PostgreSQL Global Development Group * Portions Copyright (c) 1994, Regents of the University of California * * * IDENTIFICATION * src/backend/access/hash/hashovfl.c * * NOTES * Overflow pages look like ordinary relation pages. * *------------------------------------------------------------------------- */ #include "postgres.h" #include "access/hash.h" #include "access/hash_xlog.h" #include "miscadmin.h" #include "utils/rel.h" static uint32 _hash_firstfreebit(uint32 map); /* * Convert overflow page bit number (its index in the free-page bitmaps) * to block number within the index. */ static BlockNumber bitno_to_blkno(HashMetaPage metap, uint32 ovflbitnum) { uint32 splitnum = metap->hashm_ovflpoint; uint32 i; /* Convert zero-based bitnumber to 1-based page number */ ovflbitnum += 1; /* Determine the split number for this page (must be >= 1) */ for (i = 1; i < splitnum && ovflbitnum > metap->hashm_spares[i]; i++) /* loop */ ; /* * Convert to absolute page number by adding the number of bucket pages * that exist before this split point. */ return (BlockNumber) (_hash_get_totalbuckets(i) + ovflbitnum); } /* * _hash_ovflblkno_to_bitno * * Convert overflow page block number to bit number for free-page bitmap. */ uint32 _hash_ovflblkno_to_bitno(HashMetaPage metap, BlockNumber ovflblkno) { uint32 splitnum = metap->hashm_ovflpoint; uint32 i; uint32 bitnum; /* Determine the split number containing this page */ for (i = 1; i <= splitnum; i++) { if (ovflblkno <= (BlockNumber) _hash_get_totalbuckets(i)) break; /* oops */ bitnum = ovflblkno - _hash_get_totalbuckets(i); /* * bitnum has to be greater than number of overflow page added in * previous split point. The overflow page at this splitnum (i) if any * should start from (_hash_get_totalbuckets(i) + * metap->hashm_spares[i - 1] + 1). */ if (bitnum > metap->hashm_spares[i - 1] && bitnum <= metap->hashm_spares[i]) return bitnum - 1; /* -1 to convert 1-based to 0-based */ } ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("invalid overflow block number %u", ovflblkno))); return 0; /* keep compiler quiet */ } /* * _hash_addovflpage * * Add an overflow page to the bucket whose last page is pointed to by 'buf'. * * On entry, the caller must hold a pin but no lock on 'buf'. The pin is * dropped before exiting (we assume the caller is not interested in 'buf' * anymore) if not asked to retain. The pin will be retained only for the * primary bucket. The returned overflow page will be pinned and * write-locked; it is guaranteed to be empty. * * The caller must hold a pin, but no lock, on the metapage buffer. * That buffer is returned in the same state. * * NB: since this could be executed concurrently by multiple processes, * one should not assume that the returned overflow page will be the * immediate successor of the originally passed 'buf'. Additional overflow * pages might have been added to the bucket chain in between. 
*/ Buffer _hash_addovflpage(Relation rel, Buffer metabuf, Buffer buf, bool retain_pin) { Buffer ovflbuf; Page page; Page ovflpage; HashPageOpaque pageopaque; HashPageOpaque ovflopaque; HashMetaPage metap; Buffer mapbuf = InvalidBuffer; Buffer newmapbuf = InvalidBuffer; BlockNumber blkno; uint32 orig_firstfree; uint32 splitnum; uint32 *freep = NULL; uint32 max_ovflpg; uint32 bit; uint32 bitmap_page_bit; uint32 first_page; uint32 last_bit; uint32 last_page; uint32 i, j; bool page_found = false; /* * Write-lock the tail page. Here, we need to maintain locking order such * that, first acquire the lock on tail page of bucket, then on meta page * to find and lock the bitmap page and if it is found, then lock on meta * page is released, then finally acquire the lock on new overflow buffer. * We need this locking order to avoid deadlock with backends that are * doing inserts. * * Note: We could have avoided locking many buffers here if we made two * WAL records for acquiring an overflow page (one to allocate an overflow * page and another to add it to overflow bucket chain). However, doing * so can leak an overflow page, if the system crashes after allocation. * Needless to say, it is better to have a single record from a * performance point of view as well. */ LockBuffer(buf, BUFFER_LOCK_EXCLUSIVE); /* probably redundant... */ _hash_checkpage(rel, buf, LH_BUCKET_PAGE | LH_OVERFLOW_PAGE); /* loop to find current tail page, in case someone else inserted too */ for (;;) { BlockNumber nextblkno; page = BufferGetPage(buf); pageopaque = (HashPageOpaque) PageGetSpecialPointer(page); nextblkno = pageopaque->hasho_nextblkno; if (!BlockNumberIsValid(nextblkno)) break; /* we assume we do not need to write the unmodified page */ if (retain_pin) { /* pin will be retained only for the primary bucket page */ Assert((pageopaque->hasho_flag & LH_PAGE_TYPE) == LH_BUCKET_PAGE); LockBuffer(buf, BUFFER_LOCK_UNLOCK); } else _hash_relbuf(rel, buf); retain_pin = false; buf = _hash_getbuf(rel, nextblkno, HASH_WRITE, LH_OVERFLOW_PAGE); } /* Get exclusive lock on the meta page */ LockBuffer(metabuf, BUFFER_LOCK_EXCLUSIVE); _hash_checkpage(rel, metabuf, LH_META_PAGE); metap = HashPageGetMeta(BufferGetPage(metabuf)); /* start search at hashm_firstfree */ orig_firstfree = metap->hashm_firstfree; first_page = orig_firstfree >> BMPG_SHIFT(metap); bit = orig_firstfree & BMPG_MASK(metap); i = first_page; j = bit / BITS_PER_MAP; bit &= ~(BITS_PER_MAP - 1); /* outer loop iterates once per bitmap page */ for (;;) { BlockNumber mapblkno; Page mappage; uint32 last_inpage; /* want to end search with the last existing overflow page */ splitnum = metap->hashm_ovflpoint; max_ovflpg = metap->hashm_spares[splitnum] - 1; last_page = max_ovflpg >> BMPG_SHIFT(metap); last_bit = max_ovflpg & BMPG_MASK(metap); if (i > last_page) break; Assert(i < metap->hashm_nmaps); mapblkno = metap->hashm_mapp[i]; if (i == last_page) last_inpage = last_bit; else last_inpage = BMPGSZ_BIT(metap) - 1; /* Release exclusive lock on metapage while reading bitmap page */ LockBuffer(metabuf, BUFFER_LOCK_UNLOCK); mapbuf = _hash_getbuf(rel, mapblkno, HASH_WRITE, LH_BITMAP_PAGE); mappage = BufferGetPage(mapbuf); freep = HashPageGetBitmap(mappage); for (; bit <= last_inpage; j++, bit += BITS_PER_MAP) { if (freep[j] != ALL_SET) { page_found = true; /* Reacquire exclusive lock on the meta page */ LockBuffer(metabuf, BUFFER_LOCK_EXCLUSIVE); /* convert bit to bit number within page */ bit += _hash_firstfreebit(freep[j]); bitmap_page_bit = bit; /* convert bit to absolute bit 
number */ bit += (i << BMPG_SHIFT(metap)); /* Calculate address of the recycled overflow page */ blkno = bitno_to_blkno(metap, bit); /* Fetch and init the recycled page */ ovflbuf = _hash_getinitbuf(rel, blkno); goto found; } } /* No free space here, try to advance to next map page */ _hash_relbuf(rel, mapbuf); mapbuf = InvalidBuffer; i++; j = 0; /* scan from start of next map page */ bit = 0; /* Reacquire exclusive lock on the meta page */ LockBuffer(metabuf, BUFFER_LOCK_EXCLUSIVE); } /* * No free pages --- have to extend the relation to add an overflow page. * First, check to see if we have to add a new bitmap page too. */ if (last_bit == (uint32) (BMPGSZ_BIT(metap) - 1)) { /* * We create the new bitmap page with all pages marked "in use". * Actually two pages in the new bitmap's range will exist * immediately: the bitmap page itself, and the following page which * is the one we return to the caller. Both of these are correctly * marked "in use". Subsequent pages do not exist yet, but it is * convenient to pre-mark them as "in use" too. */ bit = metap->hashm_spares[splitnum]; /* metapage already has a write lock */ if (metap->hashm_nmaps >= HASH_MAX_BITMAPS) ereport(ERROR, (errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED), errmsg("out of overflow pages in hash index \"%s\"", RelationGetRelationName(rel)))); newmapbuf = _hash_getnewbuf(rel, bitno_to_blkno(metap, bit), MAIN_FORKNUM); } else { /* * Nothing to do here; since the page will be past the last used page, * we know its bitmap bit was preinitialized to "in use". */ } /* Calculate address of the new overflow page */ bit = BufferIsValid(newmapbuf) ? metap->hashm_spares[splitnum] + 1 : metap->hashm_spares[splitnum]; blkno = bitno_to_blkno(metap, bit); /* * Fetch the page with _hash_getnewbuf to ensure smgr's idea of the * relation length stays in sync with ours. XXX It's annoying to do this * with metapage write lock held; would be better to use a lock that * doesn't block incoming searches. * * It is okay to hold two buffer locks here (one on tail page of bucket * and other on new overflow page) since there cannot be anyone else * contending for access to ovflbuf. */ ovflbuf = _hash_getnewbuf(rel, blkno, MAIN_FORKNUM); found: /* * Do the update. No ereport(ERROR) until changes are logged. We want to * log the changes for bitmap page and overflow page together to avoid * loss of pages in case the new page is added. */ START_CRIT_SECTION(); if (page_found) { Assert(BufferIsValid(mapbuf)); /* mark page "in use" in the bitmap */ SETBIT(freep, bitmap_page_bit); MarkBufferDirty(mapbuf); } else { /* update the count to indicate new overflow page is added */ metap->hashm_spares[splitnum]++; if (BufferIsValid(newmapbuf)) { _hash_initbitmapbuffer(newmapbuf, metap->hashm_bmsize, false); MarkBufferDirty(newmapbuf); /* add the new bitmap page to the metapage's list of bitmaps */ metap->hashm_mapp[metap->hashm_nmaps] = BufferGetBlockNumber(newmapbuf); metap->hashm_nmaps++; metap->hashm_spares[splitnum]++; } MarkBufferDirty(metabuf); /* * for new overflow page, we don't need to explicitly set the bit in * bitmap page, as by default that will be set to "in use". */ } /* * Adjust hashm_firstfree to avoid redundant searches. But don't risk * changing it if someone moved it while we were searching bitmap pages. 
*/ if (metap->hashm_firstfree == orig_firstfree) { metap->hashm_firstfree = bit + 1; MarkBufferDirty(metabuf); } /* initialize new overflow page */ ovflpage = BufferGetPage(ovflbuf); ovflopaque = (HashPageOpaque) PageGetSpecialPointer(ovflpage); ovflopaque->hasho_prevblkno = BufferGetBlockNumber(buf); ovflopaque->hasho_nextblkno = InvalidBlockNumber; ovflopaque->hasho_bucket = pageopaque->hasho_bucket; ovflopaque->hasho_flag = LH_OVERFLOW_PAGE; ovflopaque->hasho_page_id = HASHO_PAGE_ID; MarkBufferDirty(ovflbuf); /* logically chain overflow page to previous page */ pageopaque->hasho_nextblkno = BufferGetBlockNumber(ovflbuf); MarkBufferDirty(buf); /* XLOG stuff */ if (RelationNeedsWAL(rel)) { XLogRecPtr recptr; xl_hash_add_ovfl_page xlrec; xlrec.bmpage_found = page_found; xlrec.bmsize = metap->hashm_bmsize; XLogBeginInsert(); XLogRegisterData((char *) &xlrec, SizeOfHashAddOvflPage); XLogRegisterBuffer(0, ovflbuf, REGBUF_WILL_INIT); XLogRegisterBufData(0, (char *) &pageopaque->hasho_bucket, sizeof(Bucket)); XLogRegisterBuffer(1, buf, REGBUF_STANDARD); if (BufferIsValid(mapbuf)) { XLogRegisterBuffer(2, mapbuf, REGBUF_STANDARD); XLogRegisterBufData(2, (char *) &bitmap_page_bit, sizeof(uint32)); } if (BufferIsValid(newmapbuf)) XLogRegisterBuffer(3, newmapbuf, REGBUF_WILL_INIT); XLogRegisterBuffer(4, metabuf, REGBUF_STANDARD); XLogRegisterBufData(4, (char *) &metap->hashm_firstfree, sizeof(uint32)); recptr = XLogInsert(RM_HASH_ID, XLOG_HASH_ADD_OVFL_PAGE); PageSetLSN(BufferGetPage(ovflbuf), recptr); PageSetLSN(BufferGetPage(buf), recptr); if (BufferIsValid(mapbuf)) PageSetLSN(BufferGetPage(mapbuf), recptr); if (BufferIsValid(newmapbuf)) PageSetLSN(BufferGetPage(newmapbuf), recptr); PageSetLSN(BufferGetPage(metabuf), recptr); } END_CRIT_SECTION(); if (retain_pin) LockBuffer(buf, BUFFER_LOCK_UNLOCK); else _hash_relbuf(rel, buf); if (BufferIsValid(mapbuf)) _hash_relbuf(rel, mapbuf); LockBuffer(metabuf, BUFFER_LOCK_UNLOCK); if (BufferIsValid(newmapbuf)) _hash_relbuf(rel, newmapbuf); return ovflbuf; } /* * _hash_firstfreebit() * * Return the number of the first bit that is not set in the word 'map'. */ static uint32 _hash_firstfreebit(uint32 map) { uint32 i, mask; mask = 0x1; for (i = 0; i < BITS_PER_MAP; i++) { if (!(mask & map)) return i; mask <<= 1; } elog(ERROR, "firstfreebit found no free bit"); return 0; /* keep compiler quiet */ } /* * _hash_freeovflpage() - * * Remove this overflow page from its bucket's chain, and mark the page as * free. On entry, ovflbuf is write-locked; it is released before exiting. * * Add the tuples (itups) to wbuf in this function. We could do that in the * caller as well, but the advantage of doing it here is we can easily write * the WAL for XLOG_HASH_SQUEEZE_PAGE operation. Addition of tuples and * removal of overflow page has to done as an atomic operation, otherwise * during replay on standby users might find duplicate records. * * Since this function is invoked in VACUUM, we provide an access strategy * parameter that controls fetches of the bucket pages. * * Returns the block number of the page that followed the given page * in the bucket, or InvalidBlockNumber if no following page. * * NB: caller must not hold lock on metapage, nor on page, that's next to * ovflbuf in the bucket chain. We don't acquire the lock on page that's * prior to ovflbuf in chain if it is same as wbuf because the caller already * has a lock on same. 
*/ BlockNumber _hash_freeovflpage(Relation rel, Buffer bucketbuf, Buffer ovflbuf, Buffer wbuf, IndexTuple *itups, OffsetNumber *itup_offsets, Size *tups_size, uint16 nitups, BufferAccessStrategy bstrategy) { HashMetaPage metap; Buffer metabuf; Buffer mapbuf; BlockNumber ovflblkno; BlockNumber prevblkno; BlockNumber blkno; BlockNumber nextblkno; BlockNumber writeblkno; HashPageOpaque ovflopaque; Page ovflpage; Page mappage; uint32 *freep; uint32 ovflbitno; int32 bitmappage, bitmapbit; Bucket bucket PG_USED_FOR_ASSERTS_ONLY; Buffer prevbuf = InvalidBuffer; Buffer nextbuf = InvalidBuffer; bool update_metap = false; /* Get information from the doomed page */ _hash_checkpage(rel, ovflbuf, LH_OVERFLOW_PAGE); ovflblkno = BufferGetBlockNumber(ovflbuf); ovflpage = BufferGetPage(ovflbuf); ovflopaque = (HashPageOpaque) PageGetSpecialPointer(ovflpage); nextblkno = ovflopaque->hasho_nextblkno; prevblkno = ovflopaque->hasho_prevblkno; writeblkno = BufferGetBlockNumber(wbuf); bucket = ovflopaque->hasho_bucket; /* * Fix up the bucket chain. this is a doubly-linked list, so we must fix * up the bucket chain members behind and ahead of the overflow page being * deleted. Concurrency issues are avoided by using lock chaining as * described atop hashbucketcleanup. */ if (BlockNumberIsValid(prevblkno)) { if (prevblkno == writeblkno) prevbuf = wbuf; else prevbuf = _hash_getbuf_with_strategy(rel, prevblkno, HASH_WRITE, LH_BUCKET_PAGE | LH_OVERFLOW_PAGE, bstrategy); } if (BlockNumberIsValid(nextblkno)) nextbuf = _hash_getbuf_with_strategy(rel, nextblkno, HASH_WRITE, LH_OVERFLOW_PAGE, bstrategy); /* Note: bstrategy is intentionally not used for metapage and bitmap */ /* Read the metapage so we can determine which bitmap page to use */ metabuf = _hash_getbuf(rel, HASH_METAPAGE, HASH_READ, LH_META_PAGE); metap = HashPageGetMeta(BufferGetPage(metabuf)); /* Identify which bit to set */ ovflbitno = _hash_ovflblkno_to_bitno(metap, ovflblkno); bitmappage = ovflbitno >> BMPG_SHIFT(metap); bitmapbit = ovflbitno & BMPG_MASK(metap); if (bitmappage >= metap->hashm_nmaps) elog(ERROR, "invalid overflow bit number %u", ovflbitno); blkno = metap->hashm_mapp[bitmappage]; /* Release metapage lock while we access the bitmap page */ LockBuffer(metabuf, BUFFER_LOCK_UNLOCK); /* read the bitmap page to clear the bitmap bit */ mapbuf = _hash_getbuf(rel, blkno, HASH_WRITE, LH_BITMAP_PAGE); mappage = BufferGetPage(mapbuf); freep = HashPageGetBitmap(mappage); Assert(ISSET(freep, bitmapbit)); /* Get write-lock on metapage to update firstfree */ LockBuffer(metabuf, BUFFER_LOCK_EXCLUSIVE); /* This operation needs to log multiple tuples, prepare WAL for that */ if (RelationNeedsWAL(rel)) XLogEnsureRecordSpace(HASH_XLOG_FREE_OVFL_BUFS, 4 + nitups); START_CRIT_SECTION(); /* * we have to insert tuples on the "write" page, being careful to preserve * hashkey ordering. (If we insert many tuples into the same "write" page * it would be worth qsort'ing them). */ if (nitups > 0) { _hash_pgaddmultitup(rel, wbuf, itups, itup_offsets, nitups); MarkBufferDirty(wbuf); } /* * Reinitialize the freed overflow page. Just zeroing the page won't * work, because WAL replay routines expect pages to be initialized. See * explanation of RBM_NORMAL mode atop XLogReadBufferExtended. We are * careful to make the special space valid here so that tools like * pageinspect won't get confused. 
*/ _hash_pageinit(ovflpage, BufferGetPageSize(ovflbuf)); ovflopaque = (HashPageOpaque) PageGetSpecialPointer(ovflpage); ovflopaque->hasho_prevblkno = InvalidBlockNumber; ovflopaque->hasho_nextblkno = InvalidBlockNumber; ovflopaque->hasho_bucket = InvalidBucket; ovflopaque->hasho_flag = LH_UNUSED_PAGE; ovflopaque->hasho_page_id = HASHO_PAGE_ID; MarkBufferDirty(ovflbuf); if (BufferIsValid(prevbuf)) { Page prevpage = BufferGetPage(prevbuf); HashPageOpaque prevopaque = (HashPageOpaque) PageGetSpecialPointer(prevpage); Assert(prevopaque->hasho_bucket == bucket); prevopaque->hasho_nextblkno = nextblkno; MarkBufferDirty(prevbuf); } if (BufferIsValid(nextbuf)) { Page nextpage = BufferGetPage(nextbuf); HashPageOpaque nextopaque = (HashPageOpaque) PageGetSpecialPointer(nextpage); Assert(nextopaque->hasho_bucket == bucket); nextopaque->hasho_prevblkno = prevblkno; MarkBufferDirty(nextbuf); } /* Clear the bitmap bit to indicate that this overflow page is free */ CLRBIT(freep, bitmapbit); MarkBufferDirty(mapbuf); /* if this is now the first free page, update hashm_firstfree */ if (ovflbitno < metap->hashm_firstfree) { metap->hashm_firstfree = ovflbitno; update_metap = true; MarkBufferDirty(metabuf); } /* XLOG stuff */ if (RelationNeedsWAL(rel)) { xl_hash_squeeze_page xlrec; XLogRecPtr recptr; int i; xlrec.prevblkno = prevblkno; xlrec.nextblkno = nextblkno; xlrec.ntups = nitups; xlrec.is_prim_bucket_same_wrt = (wbuf == bucketbuf); xlrec.is_prev_bucket_same_wrt = (wbuf == prevbuf); XLogBeginInsert(); XLogRegisterData((char *) &xlrec, SizeOfHashSqueezePage); /* * bucket buffer needs to be registered to ensure that we can acquire * a cleanup lock on it during replay. */ if (!xlrec.is_prim_bucket_same_wrt) XLogRegisterBuffer(0, bucketbuf, REGBUF_STANDARD | REGBUF_NO_IMAGE); XLogRegisterBuffer(1, wbuf, REGBUF_STANDARD); if (xlrec.ntups > 0) { XLogRegisterBufData(1, (char *) itup_offsets, nitups * sizeof(OffsetNumber)); for (i = 0; i < nitups; i++) XLogRegisterBufData(1, (char *) itups[i], tups_size[i]); } XLogRegisterBuffer(2, ovflbuf, REGBUF_STANDARD); /* * If prevpage and the writepage (block in which we are moving tuples * from overflow) are same, then no need to separately register * prevpage. During replay, we can directly update the nextblock in * writepage. 
*/ if (BufferIsValid(prevbuf) && !xlrec.is_prev_bucket_same_wrt) XLogRegisterBuffer(3, prevbuf, REGBUF_STANDARD); if (BufferIsValid(nextbuf)) XLogRegisterBuffer(4, nextbuf, REGBUF_STANDARD); XLogRegisterBuffer(5, mapbuf, REGBUF_STANDARD); XLogRegisterBufData(5, (char *) &bitmapbit, sizeof(uint32)); if (update_metap) { XLogRegisterBuffer(6, metabuf, REGBUF_STANDARD); XLogRegisterBufData(6, (char *) &metap->hashm_firstfree, sizeof(uint32)); } recptr = XLogInsert(RM_HASH_ID, XLOG_HASH_SQUEEZE_PAGE); PageSetLSN(BufferGetPage(wbuf), recptr); PageSetLSN(BufferGetPage(ovflbuf), recptr); if (BufferIsValid(prevbuf) && !xlrec.is_prev_bucket_same_wrt) PageSetLSN(BufferGetPage(prevbuf), recptr); if (BufferIsValid(nextbuf)) PageSetLSN(BufferGetPage(nextbuf), recptr); PageSetLSN(BufferGetPage(mapbuf), recptr); if (update_metap) PageSetLSN(BufferGetPage(metabuf), recptr); } END_CRIT_SECTION(); /* release previous bucket if it is not same as write bucket */ if (BufferIsValid(prevbuf) && prevblkno != writeblkno) _hash_relbuf(rel, prevbuf); if (BufferIsValid(ovflbuf)) _hash_relbuf(rel, ovflbuf); if (BufferIsValid(nextbuf)) _hash_relbuf(rel, nextbuf); _hash_relbuf(rel, mapbuf); _hash_relbuf(rel, metabuf); return nextblkno; } /* * _hash_initbitmapbuffer() * * Initialize a new bitmap page. All bits in the new bitmap page are set to * "1", indicating "in use". */ void _hash_initbitmapbuffer(Buffer buf, uint16 bmsize, bool initpage) { Page pg; HashPageOpaque op; uint32 *freep; pg = BufferGetPage(buf); /* initialize the page */ if (initpage) _hash_pageinit(pg, BufferGetPageSize(buf)); /* initialize the page's special space */ op = (HashPageOpaque) PageGetSpecialPointer(pg); op->hasho_prevblkno = InvalidBlockNumber; op->hasho_nextblkno = InvalidBlockNumber; op->hasho_bucket = InvalidBucket; op->hasho_flag = LH_BITMAP_PAGE; op->hasho_page_id = HASHO_PAGE_ID; /* set all of the bits to 1 */ freep = HashPageGetBitmap(pg); MemSet(freep, 0xFF, bmsize); /* * Set pd_lower just past the end of the bitmap page data. We could even * set pd_lower equal to pd_upper, but this is more precise and makes the * page look compressible to xlog.c. */ ((PageHeader) pg)->pd_lower = ((char *) freep + bmsize) - (char *) pg; } /* * _hash_squeezebucket(rel, bucket) * * Try to squeeze the tuples onto pages occurring earlier in the * bucket chain in an attempt to free overflow pages. When we start * the "squeezing", the page from which we start taking tuples (the * "read" page) is the last bucket in the bucket chain and the page * onto which we start squeezing tuples (the "write" page) is the * first page in the bucket chain. The read page works backward and * the write page works forward; the procedure terminates when the * read page and write page are the same page. * * At completion of this procedure, it is guaranteed that all pages in * the bucket are nonempty, unless the bucket is totally empty (in * which case all overflow pages will be freed). The original implementation * required that to be true on entry as well, but it's a lot easier for * callers to leave empty overflow pages and let this guy clean it up. * * Caller must acquire cleanup lock on the primary page of the target * bucket to exclude any scans that are in progress, which could easily * be confused into returning the same tuple more than once or some tuples * not at all by the rearrangement we are performing here. To prevent * any concurrent scan to cross the squeeze scan we use lock chaining * similar to hashbucketcleanup. Refer comments atop hashbucketcleanup. 
* * We need to retain a pin on the primary bucket to ensure that no concurrent * split can start. * * Since this function is invoked in VACUUM, we provide an access strategy * parameter that controls fetches of the bucket pages. */ void _hash_squeezebucket(Relation rel, Bucket bucket, BlockNumber bucket_blkno, Buffer bucket_buf, BufferAccessStrategy bstrategy) { BlockNumber wblkno; BlockNumber rblkno; Buffer wbuf; Buffer rbuf; Page wpage; Page rpage; HashPageOpaque wopaque; HashPageOpaque ropaque; /* * start squeezing into the primary bucket page. */ wblkno = bucket_blkno; wbuf = bucket_buf; wpage = BufferGetPage(wbuf); wopaque = (HashPageOpaque) PageGetSpecialPointer(wpage); /* * if there aren't any overflow pages, there's nothing to squeeze. caller * is responsible for releasing the pin on primary bucket page. */ if (!BlockNumberIsValid(wopaque->hasho_nextblkno)) { LockBuffer(wbuf, BUFFER_LOCK_UNLOCK); return; } /* * Find the last page in the bucket chain by starting at the base bucket * page and working forward. Note: we assume that a hash bucket chain is * usually smaller than the buffer ring being used by VACUUM, else using * the access strategy here would be counterproductive. */ rbuf = InvalidBuffer; ropaque = wopaque; do { rblkno = ropaque->hasho_nextblkno; if (rbuf != InvalidBuffer) _hash_relbuf(rel, rbuf); rbuf = _hash_getbuf_with_strategy(rel, rblkno, HASH_WRITE, LH_OVERFLOW_PAGE, bstrategy); rpage = BufferGetPage(rbuf); ropaque = (HashPageOpaque) PageGetSpecialPointer(rpage); Assert(ropaque->hasho_bucket == bucket); } while (BlockNumberIsValid(ropaque->hasho_nextblkno)); /* * squeeze the tuples. */ for (;;) { OffsetNumber roffnum; OffsetNumber maxroffnum; OffsetNumber deletable[MaxOffsetNumber]; IndexTuple itups[MaxIndexTuplesPerPage]; Size tups_size[MaxIndexTuplesPerPage]; OffsetNumber itup_offsets[MaxIndexTuplesPerPage]; uint16 ndeletable = 0; uint16 nitups = 0; Size all_tups_size = 0; int i; bool retain_pin = false; readpage: /* Scan each tuple in "read" page */ maxroffnum = PageGetMaxOffsetNumber(rpage); for (roffnum = FirstOffsetNumber; roffnum <= maxroffnum; roffnum = OffsetNumberNext(roffnum)) { IndexTuple itup; Size itemsz; /* skip dead tuples */ if (ItemIdIsDead(PageGetItemId(rpage, roffnum))) continue; itup = (IndexTuple) PageGetItem(rpage, PageGetItemId(rpage, roffnum)); itemsz = IndexTupleSize(itup); itemsz = MAXALIGN(itemsz); /* * Walk up the bucket chain, looking for a page big enough for * this item and all other accumulated items. Exit if we reach * the read page. */ while (PageGetFreeSpaceForMultipleTuples(wpage, nitups + 1) < (all_tups_size + itemsz)) { Buffer next_wbuf = InvalidBuffer; bool tups_moved = false; Assert(!PageIsEmpty(wpage)); if (wblkno == bucket_blkno) retain_pin = true; wblkno = wopaque->hasho_nextblkno; Assert(BlockNumberIsValid(wblkno)); /* don't need to move to next page if we reached the read page */ if (wblkno != rblkno) next_wbuf = _hash_getbuf_with_strategy(rel, wblkno, HASH_WRITE, LH_OVERFLOW_PAGE, bstrategy); if (nitups > 0) { Assert(nitups == ndeletable); /* * This operation needs to log multiple tuples, prepare * WAL for that. */ if (RelationNeedsWAL(rel)) XLogEnsureRecordSpace(0, 3 + nitups); START_CRIT_SECTION(); /* * we have to insert tuples on the "write" page, being * careful to preserve hashkey ordering. (If we insert * many tuples into the same "write" page it would be * worth qsort'ing them). 
*/ _hash_pgaddmultitup(rel, wbuf, itups, itup_offsets, nitups); MarkBufferDirty(wbuf); /* Delete tuples we already moved off read page */ PageIndexMultiDelete(rpage, deletable, ndeletable); MarkBufferDirty(rbuf); /* XLOG stuff */ if (RelationNeedsWAL(rel)) { XLogRecPtr recptr; xl_hash_move_page_contents xlrec; xlrec.ntups = nitups; xlrec.is_prim_bucket_same_wrt = (wbuf == bucket_buf); XLogBeginInsert(); XLogRegisterData((char *) &xlrec, SizeOfHashMovePageContents); /* * bucket buffer needs to be registered to ensure that * we can acquire a cleanup lock on it during replay. */ if (!xlrec.is_prim_bucket_same_wrt) XLogRegisterBuffer(0, bucket_buf, REGBUF_STANDARD | REGBUF_NO_IMAGE); XLogRegisterBuffer(1, wbuf, REGBUF_STANDARD); XLogRegisterBufData(1, (char *) itup_offsets, nitups * sizeof(OffsetNumber)); for (i = 0; i < nitups; i++) XLogRegisterBufData(1, (char *) itups[i], tups_size[i]); XLogRegisterBuffer(2, rbuf, REGBUF_STANDARD); XLogRegisterBufData(2, (char *) deletable, ndeletable * sizeof(OffsetNumber)); recptr = XLogInsert(RM_HASH_ID, XLOG_HASH_MOVE_PAGE_CONTENTS); PageSetLSN(BufferGetPage(wbuf), recptr); PageSetLSN(BufferGetPage(rbuf), recptr); } END_CRIT_SECTION(); tups_moved = true; } /* * release the lock on previous page after acquiring the lock * on next page */ if (retain_pin) LockBuffer(wbuf, BUFFER_LOCK_UNLOCK); else _hash_relbuf(rel, wbuf); /* nothing more to do if we reached the read page */ if (rblkno == wblkno) { _hash_relbuf(rel, rbuf); return; } wbuf = next_wbuf; wpage = BufferGetPage(wbuf); wopaque = (HashPageOpaque) PageGetSpecialPointer(wpage); Assert(wopaque->hasho_bucket == bucket); retain_pin = false; /* be tidy */ for (i = 0; i < nitups; i++) pfree(itups[i]); nitups = 0; all_tups_size = 0; ndeletable = 0; /* * after moving the tuples, rpage would have been compacted, * so we need to rescan it. */ if (tups_moved) goto readpage; } /* remember tuple for deletion from "read" page */ deletable[ndeletable++] = roffnum; /* * we need a copy of index tuples as they can be freed as part of * overflow page, however we need them to write a WAL record in * _hash_freeovflpage. */ itups[nitups] = CopyIndexTuple(itup); tups_size[nitups++] = itemsz; all_tups_size += itemsz; } /* * If we reach here, there are no live tuples on the "read" page --- * it was empty when we got to it, or we moved them all. So we can * just free the page without bothering with deleting tuples * individually. Then advance to the previous "read" page. * * Tricky point here: if our read and write pages are adjacent in the * bucket chain, our write lock on wbuf will conflict with * _hash_freeovflpage's attempt to update the sibling links of the * removed page. In that case, we don't need to lock it again. */ rblkno = ropaque->hasho_prevblkno; Assert(BlockNumberIsValid(rblkno)); /* free this overflow page (releases rbuf) */ _hash_freeovflpage(rel, bucket_buf, rbuf, wbuf, itups, itup_offsets, tups_size, nitups, bstrategy); /* be tidy */ for (i = 0; i < nitups; i++) pfree(itups[i]); /* are we freeing the page adjacent to wbuf? */ if (rblkno == wblkno) { /* retain the pin on primary bucket page till end of bucket scan */ if (wblkno == bucket_blkno) LockBuffer(wbuf, BUFFER_LOCK_UNLOCK); else _hash_relbuf(rel, wbuf); return; } rbuf = _hash_getbuf_with_strategy(rel, rblkno, HASH_WRITE, LH_OVERFLOW_PAGE, bstrategy); rpage = BufferGetPage(rbuf); ropaque = (HashPageOpaque) PageGetSpecialPointer(rpage); Assert(ropaque->hasho_bucket == bucket); } /* NOTREACHED */ }
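A note on the bitmap arithmetic used throughout this file: each uint32 word of a bitmap page tracks 32 overflow-page slots, a word equal to ALL_SET means every slot in that word is taken, and _hash_firstfreebit() scans a non-full word for its first clear bit. The standalone Java sketch below is not PostgreSQL code; BITS_PER_MAP = 32 and ALL_SET are assumed to mirror the C macros, and it returns -1 where the C version reports an error.

/** Minimal illustration of the free-bit search performed by _hash_firstfreebit(). */
public final class HashOvflBitmapSketch {
    /** Number of bits in one bitmap word; mirrors the C macro BITS_PER_MAP (assumed 32 here). */
    static final int BITS_PER_MAP = 32;
    /** A word whose bits are all set, i.e. no free overflow slot in this word (C macro ALL_SET). */
    static final int ALL_SET = 0xFFFFFFFF;

    /** Return the index of the first clear bit in 'map', or -1 if every bit is set. */
    static int firstFreeBit(int map) {
        int mask = 0x1;
        for (int i = 0; i < BITS_PER_MAP; i++) {
            if ((map & mask) == 0) {
                return i;               // this overflow-page slot is free
            }
            mask <<= 1;
        }
        return -1;                      // caller treats this as "word is full", i.e. ALL_SET
    }

    public static void main(String[] args) {
        // 0b0111: bits 0..2 are "in use", bit 3 is the first free slot
        System.out.println(firstFreeBit(0b0111));   // prints 3
        System.out.println(firstFreeBit(ALL_SET));  // prints -1: move on to the next bitmap word
    }
}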
12,396
755
/*
 * Copyright (C) 2016 Airbnb, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.airbnb.rxgroups;

import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

import io.reactivex.Observable;

/**
 * Easily keep references to {@link Observable}s across lifecycle changes. Observables are grouped by
 * a unique group id, which allows you to manage and reclaim subscriptions made with the same id.
 * Subscribe to observables, and then lock or unlock their observers to control when you get the
 * event back. Events will be held in a queue until an Observer is added and the group is unlocked.
 */
@SuppressWarnings("WeakerAccess")
public class ObservableManager {
  /** Map ids to a group of observables. */
  private final Map<Long, ObservableGroup> observableGroupMap = new ConcurrentHashMap<>();
  private final AtomicLong nextId = new AtomicLong(1);
  private final UUID uuid = UUID.randomUUID();

  /**
   * @return the existing group for the provided groupId. Throws {@link IllegalArgumentException} if no group
   * with the provided groupId exists or it is already destroyed.
   */
  public ObservableGroup getGroup(long groupId) {
    ObservableGroup observableGroup = observableGroupMap.get(groupId);

    if (observableGroup == null) {
      throw new IllegalArgumentException("Group not found with groupId=" + groupId);
    }

    if (observableGroup.isDestroyed()) {
      throw new IllegalArgumentException("Group is already destroyed with groupId=" + groupId);
    }

    return observableGroup;
  }

  /** @return a new {@link ObservableGroup} with a unique groupId */
  public ObservableGroup newGroup() {
    long id = nextId.getAndIncrement();
    ObservableGroup observableGroup = new ObservableGroup(id);
    observableGroupMap.put(id, observableGroup);
    return observableGroup;
  }

  UUID id() {
    return uuid;
  }

  /**
   * Clears the provided group. References will be released, and no future results will be returned.
   * Once a group is destroyed it is an error to use it again.
   */
  public void destroy(ObservableGroup group) {
    group.destroy();
    observableGroupMap.remove(group.id());
  }
}
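A short, hypothetical usage sketch for the manager above. This is caller code rather than part of the library: only methods visible in this file are used, ObservableGroup.id() is assumed to be accessible because destroy() calls it, and the rest of the ObservableGroup API (subscribing, locking observers) is omitted because it is not shown here.

public class ObservableManagerDemo {
    void example() {
        ObservableManager manager = new ObservableManager();

        // Create a group and remember its id (e.g. across a configuration change).
        ObservableGroup group = manager.newGroup();
        long groupId = group.id();   // assumes ObservableGroup exposes id(), as destroy() above implies

        // Later, look the same group up again by id.
        try {
            ObservableGroup restored = manager.getGroup(groupId);
            // ... subscribe observables through 'restored' (that part of the API is not in this file)
        } catch (IllegalArgumentException e) {
            // unknown or already-destroyed group id
        }

        // When the owner is finished for good, release everything the group holds.
        manager.destroy(group);
    }
}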
779
739
/** * Defines the support classes and operations related to {@link org.fxmisc.richtext.model.EditableStyledDocument}, * the immutable model of rich-text content that can be rendered and edited. * * <p> * An {@link org.fxmisc.richtext.model.EditableStyledDocument} acts as an immutable model for rich-text content * that will be rendered by an object implementing the {@link org.fxmisc.richtext.TextEditingArea} interface. * A {@link org.fxmisc.richtext.model.StyledDocument} is composed of a list of * {@link org.fxmisc.richtext.model.Paragraph}s. Paragraphs are nothing more than an * object containing a paragraph style (type {@code PS}), a list of a generic segments (type {@code SEG}), and a * list of generic styles (type {@code S}) that can apply to a segment. Most of the time, either * {@link org.fxmisc.richtext.model.EditableStyledDocument} or * {@link org.fxmisc.richtext.model.ReadOnlyStyledDocument} are being used to implement that interface. * </p> * <p> * The document can include more than just text; thus, the segment generic * can be specified as regular text ({@link java.lang.String}) or as an {@link org.reactfx.util.Either} (e.g. * {@code Either<String, Image>} or as a nested Either (e.g. * {@code Either<String, Either<Image, Either<Circle, Square>}) if one wanted to have four different kinds of segments * (ways to specify a segment generic in a way that still makes the code easy to read are not described here). * </p> * <p> * To allow these generics, one must supply a {@link org.fxmisc.richtext.model.SegmentOps} object that can * correctly operate on the generic segments and their generic styles. In addition, a * {@link org.fxmisc.richtext.model.TextOps} adds one more method to its base interface by adding a method * that maps a {@link java.lang.String} to a given segment. For text-based custom segments, one should use * {@link org.fxmisc.richtext.model.SegmentOpsBase} and for node-based custom segments, one should use * {@link org.fxmisc.richtext.model.NodeSegmentOpsBase}. * </p> * <p> * The document also uses {@link org.fxmisc.richtext.model.StyleSpans} to store styles in a memory-efficient way. * To construct one, use {@link org.fxmisc.richtext.model.StyleSpans#singleton(org.fxmisc.richtext.model.StyleSpan)} * or {@link org.fxmisc.richtext.model.StyleSpansBuilder}. * </p> * <p> * To navigate throughout the document, read through the javadoc of * {@link org.fxmisc.richtext.model.TwoDimensional} and {@link org.fxmisc.richtext.model.TwoDimensional.Bias}. * Also, read the difference between "position" and "index" in * {@link org.fxmisc.richtext.model.StyledDocument#getAbsolutePosition(int, int)}. * </p> * <p>To serialize things correctly, see {@link org.fxmisc.richtext.model.Codec} and its static factory methods. * </p> * <p> * Lastly, the {@link org.fxmisc.richtext.model.EditableStyledDocument} can emit * {@link org.fxmisc.richtext.model.PlainTextChange}s or {@link org.fxmisc.richtext.model.RichTextChange}s * that can be used to undo/redo various changes. * </p> * * @see org.fxmisc.richtext.model.EditableStyledDocument * @see org.fxmisc.richtext.model.Paragraph * @see org.fxmisc.richtext.model.SegmentOps * @see org.fxmisc.richtext.model.TwoDimensional * @see org.fxmisc.richtext.model.TwoDimensional.Bias */ package org.fxmisc.richtext.model;
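To make the StyleSpans paragraph above concrete, here is a minimal sketch of building spans with StyleSpansBuilder for the common Collection<String> style type. The add(style, length) and create() calls reflect the usual builder API rather than anything quoted from this package documentation, so treat the exact signatures as an assumption.

import java.util.Collection;
import java.util.Collections;

import org.fxmisc.richtext.model.StyleSpans;
import org.fxmisc.richtext.model.StyleSpansBuilder;

public class StyleSpansSketch {
    // Builds spans for the text "hello world": "hello" bold, " " unstyled, "world" italic.
    static StyleSpans<Collection<String>> helloWorldSpans() {
        StyleSpansBuilder<Collection<String>> builder = new StyleSpansBuilder<>();
        builder.add(Collections.singleton("bold"), 5);    // "hello"
        builder.add(Collections.emptyList(), 1);          // " "
        builder.add(Collections.singleton("italic"), 5);  // "world"
        return builder.create();
    }
}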
1,160
731
package com.webank.weevent.client; import java.util.Map; import java.util.concurrent.CompletableFuture; import lombok.NonNull; /** * Java Client for WeEvent * * @author matthewliu * @since 2019/07/18 */ public interface IWeEventClient { String defaultBrokerUrl = "http://localhost:8080/weevent-broker"; /** * builder class */ class Builder { // broker url private String brokerUrl = defaultBrokerUrl; // group id private String groupId = WeEvent.DEFAULT_GROUP_ID; // stomp's account&password private String userName = ""; private String password = ""; // rpc timeout, ms private int timeout = 5000; public Builder brokerUrl(String brokerUrl) { this.brokerUrl = brokerUrl; return this; } public Builder groupId(String groupId) { this.groupId = groupId; return this; } public Builder userName(String userName) { this.userName = userName; return this; } public Builder password(String password) { this.password = password; return this; } public Builder timeout(int timeout) { this.timeout = timeout; return this; } public IWeEventClient build() throws BrokerException { return new WeEventClient(this.brokerUrl, this.groupId, this.userName, this.password, this.timeout); } } /** * instance Builder * * @return Builder */ static Builder builder() { return new Builder(); } /** * Open a topic * * @param topic topic name * @return true if success * @throws BrokerException broker exception */ boolean open(String topic) throws BrokerException; /** * Close a topic. * * @param topic topic name * @return true if success * @throws BrokerException broker exception */ boolean close(String topic) throws BrokerException; /** * Check a topic is exist or not. * * @param topic topic name * @return true if exist * @throws BrokerException broker exception */ boolean exist(String topic) throws BrokerException; /** * Publish an event to topic in synchronize way. * * @param weEvent WeEvent(String topic, byte[] content, Map extensions) * @return send result, SendResult.SUCCESS if success, and SendResult.eventId * @throws BrokerException broker exception */ SendResult publish(WeEvent weEvent) throws BrokerException; /** * Publish an event to topic in asynchronous way. * * @param weEvent WeEvent(String topic, byte[] content, Map extensions) * @return send result, SendResult.SUCCESS if success, and SendResult.eventId * @throws BrokerException broker exception */ CompletableFuture<SendResult> publishAsync(WeEvent weEvent) throws BrokerException; /** * Interface for event notify callback */ interface EventListener { /** * Called while new event arrived. * * @param event the event */ void onEvent(WeEvent event); /** * Called while raise exception. * * @param e the e */ void onException(Throwable e); } /** * Subscribe events from topic. * * @param topic topic name * @param offset from next event after this offset(an event id), WeEvent.OFFSET_FIRST if from head of queue, WeEvent.OFFSET_LAST if from tail of queue * @param extension extension params * @param listener notify interface * @return subscription Id * @throws BrokerException invalid input param */ String subscribe(String topic, String offset, Map<String, String> extension, @NonNull EventListener listener) throws BrokerException; /** * Subscribe events from topic. 
     *
     * @param topics topic list
     * @param offset from next event after this offset(an event id), WeEvent.OFFSET_FIRST if from head of queue, WeEvent.OFFSET_LAST if from tail of queue
     * @param extension extension params
     * @param listener notify interface
     * @return subscription Id
     * @throws BrokerException invalid input param
     */
    String subscribe(String[] topics, String offset, Map<String, String> extension, @NonNull EventListener listener) throws BrokerException;

    /**
     * Unsubscribe an existing subscription subscribed by the subscribe interface.
     * The consumer will no longer receive messages from broker after this.
     *
     * @param subscriptionId subscription id returned by the subscribe interface
     * @return true if success
     * @throws BrokerException broker exception
     */
    boolean unSubscribe(String subscriptionId) throws BrokerException;

    /**
     * List all topics in WeEvent's broker.
     *
     * @param pageIndex page index, from 0
     * @param pageSize page size, [10, 100)
     * @return topic list
     * @throws BrokerException broker exception
     */
    TopicPage list(Integer pageIndex, Integer pageSize) throws BrokerException;

    /**
     * Get topic information.
     *
     * @param topic topic name
     * @return topic information
     * @throws BrokerException broker exception
     */
    TopicInfo state(String topic) throws BrokerException;

    /**
     * Get event information.
     *
     * @param eventId event id
     * @return WeEvent
     * @throws BrokerException broker exception
     */
    WeEvent getEvent(String eventId) throws BrokerException;
}
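A hypothetical end-to-end sketch of the interface above: build a client, open a topic, publish one event, and subscribe with an EventListener. The broker URL is the interface's own default, the topic name is a placeholder, the WeEvent(topic, content, extensions) constructor is taken from the publish() javadoc rather than verified against the class, and BrokerException is simply propagated.

import java.util.HashMap;
import java.util.Map;

public class WeEventClientDemo {
    static void demo() throws BrokerException {
        IWeEventClient client = IWeEventClient.builder()
                .brokerUrl("http://localhost:8080/weevent-broker")   // the interface's own default URL
                .groupId(WeEvent.DEFAULT_GROUP_ID)
                .timeout(5000)
                .build();

        String topic = "com.example.demo";   // hypothetical topic name
        client.open(topic);

        // WeEvent(topic, content, extensions) as described in the publish() javadoc above.
        Map<String, String> extensions = new HashMap<>();
        SendResult result = client.publish(new WeEvent(topic, "hello".getBytes(), extensions));
        System.out.println(result);

        // Subscribe from the tail of the queue and print incoming events.
        String subscriptionId = client.subscribe(topic, WeEvent.OFFSET_LAST, new HashMap<>(),
                new IWeEventClient.EventListener() {
                    @Override
                    public void onEvent(WeEvent event) {
                        System.out.println("received: " + event);
                    }

                    @Override
                    public void onException(Throwable e) {
                        e.printStackTrace();
                    }
                });

        // ... later
        client.unSubscribe(subscriptionId);
        client.close(topic);
    }
}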
2,079
324
<reponame>tormath1/jclouds /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.cloudstack.parse; import java.util.Set; import org.jclouds.cloudstack.domain.PortForwardingRule; import org.jclouds.cloudstack.domain.Tag; import org.jclouds.json.BaseSetParserTest; import org.jclouds.json.config.GsonModule; import org.jclouds.rest.annotations.SelectJson; import org.testng.annotations.Test; import com.google.common.collect.ImmutableSet; import com.google.inject.Guice; import com.google.inject.Injector; @Test(groups = "unit") public class ListPortForwardingRulesResponseTest extends BaseSetParserTest<PortForwardingRule> { @Override protected Injector injector() { return Guice.createInjector(new GsonModule() { @Override protected void configure() { bind(DateAdapter.class).to(Iso8601DateAdapter.class); super.configure(); } }); } @Override public String resource() { return "/listportforwardingrulesresponse.json"; } @Override @SelectJson("portforwardingrule") public Set<PortForwardingRule> expected() { Set<String> cidrs = ImmutableSet.of("0.0.0.0/1", "172.16.31.10/1"); return ImmutableSet.<PortForwardingRule> of( PortForwardingRule.builder().id("15").privatePort(22).protocol(PortForwardingRule.Protocol.TCP) .publicPort(2022).virtualMachineId("3").virtualMachineName("i-3-3-VM").IPAddressId("3") .IPAddress("172.16.17.32").state(PortForwardingRule.State.ACTIVE) .CIDRs(cidrs).tags(Tag.builder().account("1").domain("ROOT").domainId("1").key("some-tag").resourceId("15") .resourceType(Tag.ResourceType.PORT_FORWARDING_RULE).value("some-value").build()).build(), PortForwardingRule.builder().id("18").privatePort(22).protocol(PortForwardingRule.Protocol.TCP) .publicPort(22).virtualMachineId("89").virtualMachineName("i-3-89-VM").IPAddressId("34") .IPAddress("192.168.3.11").state(PortForwardingRule.State.ACTIVE).build()); } }
1,028
1,511
<gh_stars>1000+
/*
 * libxt_conntrack
 * Shared library add-on to iptables for conntrack matching support.
 *
 * GPL (C) 2001 <NAME> (<EMAIL>).
 * Copyright © CC Computer Consultants GmbH, 2007 - 2008
 * <NAME> <<EMAIL>>
 */
84
1,761
<reponame>kryptine/dmd // check semantic analysis of C files /* TEST_OUTPUT: --- fail_compilation/failcstuff2.c(54): Error: `& var` has no effect fail_compilation/failcstuff2.c(55): Error: `*ptr` has no effect fail_compilation/failcstuff2.c(56): Error: `var` has no effect fail_compilation/failcstuff2.c(57): Error: `-var` has no effect fail_compilation/failcstuff2.c(58): Error: `~var` has no effect fail_compilation/failcstuff2.c(59): Error: `!var` has no effect fail_compilation/failcstuff2.c(113): Error: `cast(int)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(114): Error: `sizeof` is not a member of `int` fail_compilation/failcstuff2.c(115): Error: `cast(short)3` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(116): Error: cannot modify constant `4` fail_compilation/failcstuff2.c(117): Error: cannot modify constant `5` fail_compilation/failcstuff2.c(118): Error: cannot modify constant `6` fail_compilation/failcstuff2.c(119): Error: `cast(int)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(120): Error: `cast(int)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(121): Error: `cast(int)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(122): Error: `cast(int)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(123): Error: `cast(int)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(124): Error: `makeS22067().field` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(125): Error: `makeS22067().field` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(126): Error: `makeS22067().field` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(127): Error: `makeS22067().field` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(153): Error: `cast(short)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(154): Error: `cast(long)var` is not an lvalue and cannot be modified fail_compilation/failcstuff2.c(204): Error: variable `var` is used as a type fail_compilation/failcstuff2.c(203): variable `var` is declared here fail_compilation/failcstuff2.c(205): Error: variable `var` is used as a type fail_compilation/failcstuff2.c(203): variable `var` is declared here fail_compilation/failcstuff2.c(254): Error: identifier or `(` expected before `)` fail_compilation/failcstuff2.c(255): Error: identifier or `(` expected fail_compilation/failcstuff2.c(308): Error: cannot modify `const` expression `(*s).p` fail_compilation/failcstuff2.c(354): Error: variable `arr` cannot be read at compile time fail_compilation/failcstuff2.c(360): Error: variable `str` cannot be read at compile time fail_compilation/failcstuff2.c(404): Error: undefined identifier `p1` fail_compilation/failcstuff2.c(404): Error: undefined identifier `p2` --- */ /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22069 #line 50 void test22069() { int var; int *ptr; &var; *ptr; +var; -var; ~var; !var; } /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22067 #line 100 struct S22067 { int field; }; struct S22067 makeS22067() { return (struct S22067) { 42 }; } void test22067() { int var; (int) var = 1; sizeof(var) = 2; ++(short)3; --4; (5)++; (&6); ((int)var)++; ((int)var)--; ++(int)var; --(int)var; &(int)var; &makeS22067().field; makeS22067().field = 1; makeS22067().field++; 
--makeS22067().field; } /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22068 #line 150 void test22068() { int var; ++(short) var; --(long long) var; } #line 200 /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=21992 void test21992(int var) { var = (var) ~ 1234; var = (var) ! 1234; } /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22102 #line 250 typedef int int22102; void test22102() { int22102(); int22102(0); } /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22405 #line 300 struct S22405 { int * const p; int *q; }; void test22405(struct S22405 *s) { s->p = (const int *)(s->q); } /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22413 #line 350 void test22413a() { int arr[6] = {1,2,3,4,5,6}; int arr2[] = arr; } void test22413b() { const char *str = "hello"; char msg[] = str; } /***************************************************/ // https://issues.dlang.org/show_bug.cgi?id=22584 #line 400 long test22584(long p1, long p2); long test22584(long, long) { return p1 + p2; }
1,831
892
<reponame>westonsteimel/advisory-database-github { "schema_version": "1.2.0", "id": "GHSA-4h44-w6fm-548g", "modified": "2021-11-19T15:44:20Z", "published": "2020-07-29T16:15:12Z", "aliases": [ "CVE-2020-15086" ], "summary": "Potential Remote Code Execution in TYPO3 with mediace extension", "details": "> ### Meta\n> * CVSS: `CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H/E:F/RL:O/RC:C` (9.1)\n> * CWE-325, CWE-20, CWE-200, CWE-502\n\n### Problem\nIt has been discovered that an internal verification mechanism can be used to generate arbitrary checksums. This allows to inject arbitrary data having a valid cryptographic message authentication code (HMAC-SHA1) and can lead to various attack chains as described below.\n\n* [TYPO3-CORE-SA-2020-007](https://typo3.org/security/advisory/typo3-core-sa-2020-007), [CVE-2020-15099](https://nvd.nist.gov/vuln/detail/CVE-2020-15099): Potential Privilege Escalation\n + the database server used for a TYPO3 installation must be accessible for an attacker (either via internet or shared hosting network)\n + `CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H/E:F/RL:O/RC:C` (7.5, high)\n* [TYPO3-CORE-SA-2016-013](https://typo3.org/security/advisory/typo3-core-sa-2016-013), [CVE-2016-5091](https://nvd.nist.gov/vuln/detail/CVE-2016-5091): Insecure Deserialization & Remote Code Execution\n + an attacker must have access to at least one Extbase plugin or module action in a TYPO3 installation\n + `CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H/E:F/RL:O/RC:C` (9.1, critical)\n\nThe overall severity of this vulnerability is critical (9.1) based on mentioned attack chains and the fact it does not require any privileges.\n\n### Solution\nIn case the extension is not used and required at all, it is suggested to uninstall and remove it from the system completely. Otherwise, an updated version 7.6.5 is available from the TYPO3 extension manager, Packagist and the TYPO3 extension repository:\n\n* https://extensions.typo3.org/extension/download/mediace/7.6.5/zip/\n* https://packagist.org/packages/friendsoftypo3/mediace#7.6.5\n\nAs a precautionary measure it is advised to change `encryptionKey` and database credentials in `typo3conf/LocalConfiguration.php`.\n\n### Credits\nThanks to TYPO3 security team member <NAME> who reported and fixed the issue.\n\n### References\n* [TYPO3-EXT-SA-2020-014](https://typo3.org/security/advisory/typo3-ext-sa-2020-014)", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" } ], "affected": [ { "package": { "ecosystem": "Packagist", "name": "friendsoftypo3/mediace" }, "ranges": [ { "type": "ECOSYSTEM", "events": [ { "introduced": "7.6.2" }, { "fixed": "7.6.5" } ] } ] } ], "references": [ { "type": "WEB", "url": "https://github.com/FriendsOfTYPO3/mediace/security/advisories/GHSA-4h44-w6fm-548g" }, { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2020-15086" }, { "type": "WEB", "url": "https://github.com/FriendsOfTYPO3/mediace/pull/31" }, { "type": "WEB", "url": "https://github.com/FriendsOfTYPO3/mediace/commit/fa29ffd3e8b275782a8600d2406e1b1e5e16ae75" }, { "type": "PACKAGE", "url": "https://github.com/FriendsOfTYPO3/mediace" } ], "database_specific": { "cwe_ids": [ "CWE-20", "CWE-200", "CWE-325", "CWE-502" ], "severity": "HIGH", "github_reviewed": true } }
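For readers unfamiliar with the "valid cryptographic message authentication code (HMAC-SHA1)" wording in the advisory above: ordinarily only a holder of the server-side secret can produce a tag that verifies, and that is the property the reported weakness defeats. The sketch below is a generic Java illustration of that property, not TYPO3 code; the key and payload are placeholders.

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class HmacSketch {
    // Compute an HMAC-SHA1 tag over 'data' with a server-side secret key.
    static byte[] sign(byte[] key, byte[] data) throws Exception {
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(new SecretKeySpec(key, "HmacSHA1"));
        return mac.doFinal(data);
    }

    // Verify in constant time; only holders of the secret key should be able to produce a valid tag.
    static boolean verify(byte[] key, byte[] data, byte[] tag) throws Exception {
        return MessageDigest.isEqual(sign(key, data), tag);
    }

    public static void main(String[] args) throws Exception {
        byte[] key = "example-encryption-key".getBytes(StandardCharsets.UTF_8); // placeholder secret
        byte[] payload = "serialized-data".getBytes(StandardCharsets.UTF_8);
        byte[] tag = sign(key, payload);
        System.out.println(verify(key, payload, tag)); // true
    }
}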
1,642
973
<gh_stars>100-1000 package com.xw.project.gracefulmovies.data.ao; /** * <p> * Created by woxingxiao on 2019-01-10. */ public class ReGeoResult { private String status; private String info; private String infocode; private ReGeo regeocode; public String getStatus() { return status; } public void setStatus(String status) { this.status = status; } public String getInfo() { return info; } public void setInfo(String info) { this.info = info; } public String getInfocode() { return infocode; } public void setInfocode(String infocode) { this.infocode = infocode; } public ReGeo getRegeocode() { return regeocode; } public void setRegeocode(ReGeo regeocode) { this.regeocode = regeocode; } //////////////////////////////////////////////////////////////////////////////////////////////// public static class ReGeo { public ReGeoInfo addressComponent; } //////////////////////////////////////////////////////////////////////////////////////////////// public static class ReGeoInfo { public String province; public Object city; } }
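The POJO above maps an AMap reverse-geocoding ("regeo") response. The sketch below shows how such JSON could be bound to it with a Gson-style mapper; Gson itself is an assumption (the surrounding project may use a different JSON library), the sample JSON is trimmed to the fields declared above, and city is typed as Object because the service returns either a string or an empty array.

import com.google.gson.Gson;

public class ReGeoResultDemo {
    public static void main(String[] args) {
        // Hypothetical, trimmed response in the shape the fields above expect.
        String json = "{"
                + "\"status\":\"1\",\"info\":\"OK\",\"infocode\":\"10000\","
                + "\"regeocode\":{\"addressComponent\":{\"province\":\"北京市\",\"city\":[]}}"
                + "}";

        ReGeoResult result = new Gson().fromJson(json, ReGeoResult.class);
        System.out.println(result.getStatus());                               // 1
        System.out.println(result.getRegeocode().addressComponent.province);  // 北京市
    }
}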
464
338
/** * @file testmain.cpp * @brief main file for Catch2 tests * @author <NAME> * @date Sep 7, 2018 */ #define CATCH_CONFIG_MAIN #include <minisam/3rdparty/Catch2/catch.hpp>
80
799
import demistomock as demisto from CommonServerPython import * from CommonServerUserPython import * from multiprocessing import Process from gevent.pywsgi import WSGIServer import subprocess import gevent from signal import SIGUSR1 import requests from flask.logging import default_handler from typing import Any, Dict import os import traceback from string import Template class Handler: @staticmethod def write(msg: str): demisto.info(msg) class ErrorHandler: @staticmethod def write(msg: str): demisto.error(f'wsgi error: {msg}') DEMISTO_LOGGER: Handler = Handler() ERROR_LOGGER: ErrorHandler = ErrorHandler() # nginx server params NGINX_SERVER_ACCESS_LOG = '/var/log/nginx/access.log' NGINX_SERVER_ERROR_LOG = '/var/log/nginx/error.log' NGINX_SERVER_CONF_FILE = '/etc/nginx/conf.d/default.conf' NGINX_SSL_KEY_FILE = '/etc/nginx/ssl/ssl.key' NGINX_SSL_CRT_FILE = '/etc/nginx/ssl/ssl.crt' NGINX_SSL_CERTS = f''' ssl_certificate {NGINX_SSL_CRT_FILE}; ssl_certificate_key {NGINX_SSL_KEY_FILE}; ''' NGINX_SERVER_CONF = ''' server { listen $port default_server $ssl; $sslcerts proxy_cache_key $scheme$proxy_host$request_uri$extra_cache_key; # Static test file location = /nginx-test { alias /var/lib/nginx/html/index.html; default_type text/html; } # Proxy everything to python location / { proxy_pass http://localhost:$serverport/; add_header X-Proxy-Cache $upstream_cache_status; # allow bypassing the cache with an arg of nocache=1 ie http://server:7000/?nocache=1 proxy_cache_bypass $arg_nocache; } } ''' def create_nginx_server_conf(file_path: str, port: int, params: Dict): """Create nginx conf file Args: file_path (str): path of server conf file port (int): listening port. server port to proxy to will be port+1 params (Dict): additional nginx params Raises: DemistoException: raised if there is a detected config error """ params = demisto.params() if not params else params template_str = params.get('nginx_server_conf') or NGINX_SERVER_CONF certificate: str = params.get('certificate', '') private_key: str = params.get('key', '') ssl = '' sslcerts = '' serverport = port + 1 extra_cache_key = '' if (certificate and not private_key) or (private_key and not certificate): raise DemistoException('If using HTTPS connection, both certificate and private key should be provided.') if certificate and private_key: demisto.debug('Using HTTPS for nginx conf') with open(NGINX_SSL_CRT_FILE, 'wt') as f: f.write(certificate) with open(NGINX_SSL_KEY_FILE, 'wt') as f: f.write(private_key) ssl = 'ssl' # to be included in the listen directive sslcerts = NGINX_SSL_CERTS credentials = params.get('credentials') or {} if credentials.get('identifier'): extra_cache_key = "$http_authorization" server_conf = Template(template_str).safe_substitute(port=port, serverport=serverport, ssl=ssl, sslcerts=sslcerts, extra_cache_key=extra_cache_key) with open(file_path, mode='wt+') as f: f.write(server_conf) def start_nginx_server(port: int, params: Dict = {}) -> subprocess.Popen: params = demisto.params() if not params else params create_nginx_server_conf(NGINX_SERVER_CONF_FILE, port, params) nginx_global_directives = 'daemon off;' global_directives_conf = params.get('nginx_global_directives') if global_directives_conf: nginx_global_directives = f'{nginx_global_directives} {global_directives_conf}' directive_args = ['-g', nginx_global_directives] # we first do a test that all config is good and log it try: nginx_test_command = ['nginx', '-T'] nginx_test_command.extend(directive_args) test_output = subprocess.check_output(nginx_test_command, 
stderr=subprocess.STDOUT, text=True) demisto.info(f'ngnix test passed. command: [{nginx_test_command}]') demisto.debug(f'nginx test ouput:\n{test_output}') except subprocess.CalledProcessError as err: raise ValueError(f"Failed testing nginx conf. Return code: {err.returncode}. Output: {err.output}") nginx_command = ['nginx'] nginx_command.extend(directive_args) res = subprocess.Popen(nginx_command, text=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) demisto.info(f'done starting nginx with pid: {res.pid}') return res def nginx_log_process(nginx_process: subprocess.Popen): try: old_access = NGINX_SERVER_ACCESS_LOG + '.old' old_error = NGINX_SERVER_ERROR_LOG + '.old' log_access = False log_error = False # first check if one of the logs are missing. This may happen on rare ocations that we renamed and deleted the file # before nginx completed the role over of the logs missing_log = False if not os.path.isfile(NGINX_SERVER_ACCESS_LOG): missing_log = True demisto.info(f'Missing access log: {NGINX_SERVER_ACCESS_LOG}. Will send roll signal to nginx.') if not os.path.isfile(NGINX_SERVER_ERROR_LOG): missing_log = True demisto.info(f'Missing error log: {NGINX_SERVER_ERROR_LOG}. Will send roll signal to nginx.') if missing_log: nginx_process.send_signal(int(SIGUSR1)) demisto.info(f'Done sending roll signal to nginx (pid: {nginx_process.pid}) after detecting missing log file.' ' Will skip this iteration.') return if os.path.getsize(NGINX_SERVER_ACCESS_LOG): log_access = True os.rename(NGINX_SERVER_ACCESS_LOG, old_access) if os.path.getsize(NGINX_SERVER_ERROR_LOG): log_error = True os.rename(NGINX_SERVER_ERROR_LOG, old_error) if log_access or log_error: # nginx rolls the logs when getting sigusr1 nginx_process.send_signal(int(SIGUSR1)) gevent.sleep(0.5) # sleep 0.5 to let nginx complete the roll if log_access: with open(old_access, 'rt') as f: start = 1 for lines in batch(f.readlines(), 100): end = start + len(lines) demisto.info(f'nginx access log ({start}-{end-1}): ' + ''.join(lines)) start = end os.unlink(old_access) if log_error: with open(old_error, 'rt') as f: start = 1 for lines in batch(f.readlines(), 100): end = start + len(lines) demisto.error(f'nginx error log ({start}-{end-1}): ' + ''.join(lines)) start = end os.unlink(old_error) except Exception as e: demisto.error(f'Failed nginx log processing: {e}. Exception: {traceback.format_exc()}') def nginx_log_monitor_loop(nginx_process: subprocess.Popen): """An endless loop to monitor nginx logs. Meant to be spawned as a greenlet. Will run every minute and if needed will dump the nginx logs and roll them if needed. Args: nginx_process (subprocess.Popen): the nginx process. Will send signal for log rolling. 
""" while True: gevent.sleep(60) nginx_log_process(nginx_process) def test_nginx_server(port: int, params: Dict): nginx_process = start_nginx_server(port, params) # let nginx startup time.sleep(0.5) try: protocol = 'https' if params.get('key') else 'http' res = requests.get(f'{protocol}://localhost:{port}/nginx-test', verify=False, proxies={"http": "", "https": ""}) # nosec guardrails-disable-line res.raise_for_status() welcome = 'Welcome to nginx' if welcome not in res.text: raise ValueError(f'Unexpected response from nginx-text (does not contain "{welcome}"): {res.text}') finally: try: nginx_process.terminate() nginx_process.wait(1.0) except Exception as ex: demisto.error(f'failed stoping test nginx process: {ex}') def try_parse_integer(int_to_parse: Any, err_msg: str) -> int: """ Tries to parse an integer, and if fails will throw DemistoException with given err_msg """ try: res = int(int_to_parse) except (TypeError, ValueError): raise DemistoException(err_msg) return res def get_params_port(params: Dict = None) -> int: """ Gets port from the integration parameters """ params = demisto.params() if not params else params port_mapping: str = params.get('longRunningPort', '') err_msg: str port: int if port_mapping: err_msg = f'Listen Port must be an integer. {port_mapping} is not valid.' if ':' in port_mapping: port = try_parse_integer(port_mapping.split(':')[1], err_msg) else: port = try_parse_integer(port_mapping, err_msg) else: raise ValueError('Please provide a Listen Port.') return port def run_long_running(params: Dict = None, is_test: bool = False): """ Start the long running server :param params: Demisto params :param is_test: Indicates whether it's test-module run or regular run :return: None """ params = demisto.params() if not params else params nginx_process = None nginx_log_monitor = None try: nginx_port = get_params_port() server_port = nginx_port + 1 # set our own log handlers APP.logger.removeHandler(default_handler) # type: ignore[name-defined] # pylint: disable=E0602 integration_logger = IntegrationLogger() integration_logger.buffering = False log_handler = DemistoHandler(integration_logger) log_handler.setFormatter( logging.Formatter("flask log: [%(asctime)s] %(levelname)s in %(module)s: %(message)s") ) APP.logger.addHandler(log_handler) # type: ignore[name-defined] # pylint: disable=E0602 demisto.debug('done setting demisto handler for logging') server = WSGIServer(('0.0.0.0', server_port), APP, log=DEMISTO_LOGGER, # type: ignore[name-defined] # pylint: disable=E0602 error_log=ERROR_LOGGER) if is_test: test_nginx_server(nginx_port, params) server_process = Process(target=server.serve_forever) server_process.start() time.sleep(5) try: server_process.terminate() server_process.join(1.0) except Exception as ex: demisto.error(f'failed stoping test wsgi server process: {ex}') else: nginx_process = start_nginx_server(nginx_port, params) nginx_log_monitor = gevent.spawn(nginx_log_monitor_loop, nginx_process) demisto.updateModuleHealth('') server.serve_forever() except Exception as e: error_message = str(e) demisto.error(f'An error occurred: {error_message}. 
Exception: {traceback.format_exc()}') demisto.updateModuleHealth(f'An error occurred: {error_message}') raise ValueError(error_message) finally: if nginx_process: try: nginx_process.terminate() except Exception as ex: demisto.error(f'Failed stopping nginx process when exiting: {ex}') if nginx_log_monitor: try: nginx_log_monitor.kill(timeout=1.0) except Exception as ex: demisto.error(f'Failed stopping nginx_log_monitor when exiting: {ex}')
5,167
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.versioning.ui.history; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.*; import java.util.Collection; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import javax.swing.*; import org.netbeans.api.diff.*; import org.netbeans.api.progress.ProgressHandle; import org.netbeans.api.progress.ProgressHandleFactory; import org.netbeans.modules.versioning.core.api.VCSFileProxy; import org.netbeans.modules.versioning.diff.DiffViewModeSwitcher; import org.netbeans.modules.versioning.ui.history.HistoryComponent.CompareMode; import org.netbeans.modules.versioning.util.NoContentPanel; import org.netbeans.modules.versioning.util.Utils; import org.openide.explorer.ExplorerManager; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; import org.openide.loaders.DataObject; import org.openide.loaders.DataObjectNotFoundException; import org.openide.nodes.Node; import org.openide.util.Cancellable; import org.openide.util.NbBundle; import org.openide.util.Lookup; import org.openide.util.RequestProcessor.Task; import org.openide.util.lookup.Lookups; /** * * @author <NAME> */ public class HistoryDiffView implements PropertyChangeListener { private final HistoryComponent tc; private DiffPanel panel; private Component diffComponent; private DiffController diffView; private DiffTask diffTask; private final Object VIEW_LOCK = new Object(); private DiffViewModeSwitcher diffViewModeSwitcher; /** Creates a new instance of HistoryDiffView */ public HistoryDiffView(HistoryComponent tc) { this.tc = tc; panel = new DiffPanel(); showNoContent(NbBundle.getMessage(HistoryDiffView.class, "MSG_DiffPanel_NoVersion")); // NOI18N } @Override public void propertyChange(PropertyChangeEvent evt) { if(ExplorerManager.PROP_SELECTED_NODES.equals(evt.getPropertyName())) { tc.disableNavigationButtons(); refresh(((Node[]) evt.getNewValue())); } else if (DiffController.PROP_DIFFERENCES.equals(evt.getPropertyName())) { synchronized(VIEW_LOCK) { if(diffView != null) { tc.refreshNavigationButtons(diffView.getDifferenceIndex(), diffView.getDifferenceCount()); } } } } JPanel getPanel() { return panel; } private void refresh(Node[] newSelection) { if(newSelection != null) { if (newSelection.length == 1) { HistoryEntry entry1 = newSelection[0].getLookup().lookup(HistoryEntry.class); if (entry1 != null) { VCSFileProxy file1 = getFile(newSelection[0], entry1); CompareMode mode = tc.getMode(); switch(mode) { case TOCURRENT: refreshCurrentDiffPanel(entry1, file1); return; case TOPARENT: refreshRevisionDiffPanel(null, entry1, null, 
file1); return; default: throw new IllegalStateException("Wrong mode selected: " + mode); // NOI18N } } } else if (newSelection.length == 2) { HistoryEntry entry1 = newSelection[0].getLookup().lookup(HistoryEntry.class); VCSFileProxy file1 = null; if (entry1 != null) { file1 = getFile(newSelection[0], entry1); } VCSFileProxy file2 = null; HistoryEntry entry2 = newSelection[1].getLookup().lookup(HistoryEntry.class); if (entry2 != null) { file2 = file1 = getFile(newSelection[1], entry2); } if(entry1 != null && entry2 != null && file1 != null && file2 != null) { if(entry1.getDateTime().getTime() > entry1.getDateTime().getTime()) { refreshRevisionDiffPanel(entry1, entry2, file1, file2); } else { refreshRevisionDiffPanel(entry2, entry1, file2, file1); } return; } } } String msgKey = (newSelection == null) || (newSelection.length == 0) ? "MSG_DiffPanel_NoVersion" //NOI18N : "MSG_DiffPanel_IllegalSelection"; //NOI18N showNoContent(NbBundle.getMessage(HistoryDiffView.class, msgKey)); } private void refreshRevisionDiffPanel(HistoryEntry entry1, HistoryEntry entry2, VCSFileProxy file1, VCSFileProxy file2) { onSelectionLastDifference = false; scheduleTask(new RevisionDiffPrepareTask(entry1, entry2, file1, file2, onSelectionLastDifference)); } private void refreshCurrentDiffPanel(HistoryEntry entry, VCSFileProxy file) { onSelectionLastDifference = false; scheduleTask(new CurrentDiffPrepareTask(entry, file, onSelectionLastDifference)); } private void scheduleTask(DiffTask newTask) { if(diffTask != null) { diffTask.cancel(); } diffTask = newTask; diffTask.schedule(); } private VCSFileProxy getFile(Node node, HistoryEntry entry) { Collection<? extends VCSFileProxy> proxies = node.getLookup().lookupAll(VCSFileProxy.class); if(proxies != null && proxies.size() == 1) { return proxies.iterator().next(); } else { VCSFileProxy[] files = entry.getFiles(); // HACK ensure that for form files .java is returned as default if(files.length == 2) { if((files[0].getName().endsWith(".java") && // NOI18N files[1].getName().endsWith(".form"))) // NOI18N { return files[0]; } if((files[1].getName().endsWith(".java") && // NOI18N files[0].getName().endsWith(".form"))) // NOI18N { return files[1]; } } return files[0]; } } private boolean onSelectionLastDifference = false; void onSelectionLastDifference() { onSelectionLastDifference = true; } void componentClosed () { DiffViewModeSwitcher.release(this); diffViewModeSwitcher = null; } private class CurrentDiffPrepareTask extends DiffTask { private final HistoryEntry entry; private final VCSFileProxy file; private final boolean selectLast; public CurrentDiffPrepareTask(final HistoryEntry entry, VCSFileProxy file, boolean selectLast) { this.entry = entry; this.file = file; this.selectLast = selectLast; } @Override public void run() { History.LOG.log( Level.FINE, "preparing current diff for: {0} - {1}", // NOI18N new Object[]{entry, file}); DiffController dv = getView(entry, file); if(isCancelled()) { return; } if(dv != null) { History.LOG.log(Level.FINE, "setting cached diff view for {0} - {1}", new Object[]{entry.getRevision(), file}); setDiffView(dv, selectLast); return; } File tmpFile; if(isCancelled()) { return; } startPrepareProgress(); FileObject tmpFo; try { File tempFolder = Utils.getTempFolder(); tmpFile = new File(tempFolder, file.getName()); // XXX entry.getRevisionFile(file, VCSFileProxy.createFileProxy(tmpFile)); tmpFo = FileUtil.toFileObject(tmpFile); History.LOG.log(Level.FINE, "retrieved revision file for {0} {1}", new Object[]{entry.getRevision(), file}); 
if(isCancelled()) { return; } } finally { finishPrepareProgress(); } String title1 = getTitle(entry, file); String title2; if(file.exists()) { title2 = "<html><b>" + NbBundle.getMessage(HistoryDiffView.class, "LBL_Diff_CurrentFile") + "</b></html>"; // NOI18N } else { title2 = NbBundle.getMessage(HistoryDiffView.class, "LBL_Diff_FileDeleted"); // NOI18N } dv = prepareDiffView(tmpFo, file.toFileObject(), true, file.exists(), title1, title2, true, selectLast); if(isCancelled()) { return; } if(dv != null) { History.LOG.log(Level.FINE, "setting diff view for {0} - {1}", new Object[]{entry.getRevision(), file}); setDiffView(dv, selectLast); putView(dv, entry, file); } } } private Map<String, DiffController> views = new ConcurrentHashMap<String, DiffController>(); private DiffController getView(HistoryEntry entry, VCSFileProxy file) { assert entry != null; if(entry == null) { return null; } return views.get(getKey(entry, file)); } private DiffController getView(HistoryEntry entry1, VCSFileProxy file1, HistoryEntry entry2) { assert entry1 != null && entry2 != null; if(entry1 == null && entry2 == null) { return null; } return views.get(getKey(entry1, file1, entry2)); } private void putView(DiffController dv, HistoryEntry entry1, VCSFileProxy file1, HistoryEntry entry2) { views.put(getKey(entry1, file1, entry2), dv); } private void putView(DiffController dv, HistoryEntry entry, VCSFileProxy file) { views.put(getKey(entry, file), dv); } private String getKey(HistoryEntry entry1, VCSFileProxy file1, HistoryEntry entry2) { return getKey(entry1, file1) + "_" + // NOI18N entry2.getRevision() + "_" + // NOI18N entry2.getDateTime().getTime(); } private String getKey(HistoryEntry entry, VCSFileProxy file) { return entry.getRevision() + "_" + // NOI18N entry.getDateTime().getTime() + "_" + file; } private class RevisionDiffPrepareTask extends DiffTask { private HistoryEntry entry1; private final HistoryEntry entry2; private VCSFileProxy file1; private final VCSFileProxy file2; private final boolean selectLast; public RevisionDiffPrepareTask(final HistoryEntry entry1, HistoryEntry entry2, VCSFileProxy file1, VCSFileProxy file2, boolean selectLast) { this.entry1 = entry1; this.entry2 = entry2; this.file1 = file1; this.file2 = file2; this.selectLast = selectLast; } @Override public void run() { History.LOG.log( Level.FINE, "preparing previous diff for: {0} - {1} and {2} - {3}", // NOI18N new Object[]{entry1, file1, entry2, file2}); startPrepareProgress(); FileObject revisionFo1; FileObject revisionFo2; try { if(entry1 == null && file1 == null) { entry1 = entry2.getParent(file2); if(entry1 == null) { entry1 = tc.getParentEntry(entry2); if(isCancelled()) { return; } } file1 = file2; if (entry1 == null) { EventQueue.invokeLater(new Runnable() { @Override public void run () { showNoContent(NbBundle.getMessage(HistoryDiffView.class, "MSG_DiffPanel_NoVersionToCompare")); // NOI18N } }); return; } } DiffController dv = getView(entry2, file2, entry1); if(isCancelled()) { return; } if(dv != null) { History.LOG.log( Level.FINE, "setting cached diff view for: {0} - {1} and {2} - {3}", // NOI18N new Object[]{entry1, file1, entry2, file2}); setDiffView(dv, selectLast); return; } revisionFo1 = getRevisionFile(entry1, file1); History.LOG.log(Level.FINE, "retrieved revision file for {0} - {1}", new Object[]{entry1.getRevision(), file1}); if(isCancelled()) { return; } revisionFo2 = getRevisionFile(entry2, file2); History.LOG.log(Level.FINE, "retrieved revision file for {0} - {1}", new Object[]{entry2.getRevision(), file2}); 
if(isCancelled()) { return; } } finally { finishPrepareProgress(); } String title1 = getTitle(entry1, file1); String title2 = getTitle(entry2, file2); DiffController dv = prepareDiffView(revisionFo1, revisionFo2, true, true, title1, title2, false, selectLast); if(isCancelled()) { return; } if(dv != null) { History.LOG.log( Level.FINE, "setting diff view for: {0} - {1} and {2} - {3}", // NOI18N new Object[]{entry1, file1, entry2, file2}); setDiffView(dv, selectLast); putView(dv, entry1, file1, entry2); } } private FileObject getRevisionFile(HistoryEntry entry, VCSFileProxy file) { File tempFolder = Utils.getTempFolder(); File revFile = new File(tempFolder, file.getName()); // XXX entry.getRevisionFile(file, VCSFileProxy.createFileProxy(revFile)); return FileUtil.toFileObject(revFile); } } private void setDiffView(final DiffController dv, final boolean selectLast) { final int diffCount = dv.getDifferenceCount(); final int diffIdx = dv.getDifferenceIndex(); synchronized(VIEW_LOCK) { diffView = dv; } SwingUtilities.invokeLater(new Runnable() { @Override public void run() { History.LOG.finer("invoked set diff view"); // NOI18N getDiffViewModeSwitcher().setupMode(dv); JComponent c = dv.getJComponent(); setDiffComponent(c); tc.setDiffView(c); // in case the diffview listener did not fire if(dv.getDifferenceCount() > 0) { setCurrentDifference(selectLast ? diffCount - 1 : 0); } else { tc.refreshNavigationButtons(diffIdx, diffCount); } panel.revalidate(); panel.repaint(); } }); } private DiffViewModeSwitcher getDiffViewModeSwitcher () { if (diffViewModeSwitcher == null) { diffViewModeSwitcher = DiffViewModeSwitcher.get(this); } return diffViewModeSwitcher; } private String getTitle(HistoryEntry entry, VCSFileProxy file) { String title1; if(file.exists()) { if(entry.isLocalHistory()) { title1 = "<html>" + file.getName() + " (<b>" + RevisionNode.getFormatedDate(entry) + "</b>)</html>"; // NOI18N } else { title1 = "<html>" + file.getName() + " (<b>" + entry.getRevisionShort() + "</b>)</html>"; // NOI18N } } else { title1 = NbBundle.getMessage(HistoryDiffView.class, "LBL_Diff_FileDeleted"); // NOI18N } return title1; } private DiffController prepareDiffView(final FileObject file1, final FileObject file2, boolean file1Exists, boolean file2Exists, final String title1, final String title2, final boolean editable, final boolean selectLast) { History.LOG.log( Level.FINE, "preparing diff view for: {0} - {1} and {2} - {3}", // NOI18N new Object[]{title1, file1, title2, file2}); // NOI18N StreamSource ss1; if(file1Exists) { ss1 = new LHStreamSource(file1, title1, getMimeType(file2), editable); } else { ss1 = StreamSource.createSource("currentfile", title1, getMimeType(file1), new StringReader("")); // NOI18N } StreamSource ss2; if(file2Exists) { ss2 = new LHStreamSource(file2, title2, getMimeType(file2), editable); } else { ss2 = StreamSource.createSource("currentfile", title2, getMimeType(file2), new StringReader("")); // NOI18N } final DiffController dv; try { dv = DiffController.createEnhanced(ss1, ss2); } catch (IOException ioe) { History.LOG.log(Level.SEVERE, null, ioe); return null; } dv.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { if (DiffController.PROP_DIFFERENCES.equals(evt.getPropertyName())) { dv.removePropertyChangeListener(this); int diffCount = dv.getDifferenceCount(); synchronized(VIEW_LOCK) { // diffView may already be a completely different view if (dv == diffView && diffCount > 0) { setCurrentDifference(selectLast ? 
diffCount - 1 : 0); } } } } }); dv.addPropertyChangeListener(HistoryDiffView.this); return dv; } private String getMimeType(FileObject file) { FileObject fo = file; if(fo != null) { return fo.getMIMEType(); } else { return "content/unknown"; // NOI18N } } private void showNoContent(String s) { setDiffComponent(new NoContentPanel(s)); } private void setDiffComponent(Component component) { if(diffComponent != null) { panel.diffPanel.remove(diffComponent); History.LOG.log(Level.FINEST, "replaced current diff component {0}", diffComponent); // NOI18N } panel.diffPanel.add(component, BorderLayout.CENTER); diffComponent = component; History.LOG.log(Level.FINEST, "added diff component {0}", diffComponent); // NOI18N panel.diffPanel.revalidate(); panel.diffPanel.repaint(); } void onNextButton() { synchronized(VIEW_LOCK) { if(diffView == null) { return; } int nextDiffernce = diffView.getDifferenceIndex() + 1; if(nextDiffernce < diffView.getDifferenceCount()) { setCurrentDifference(nextDiffernce); } } } void onPrevButton() { synchronized(VIEW_LOCK) { if(diffView == null) { return; } int prevDiffernce = diffView.getDifferenceIndex() - 1; if(prevDiffernce > -1) { setCurrentDifference(prevDiffernce); } } } void modeChanged() { refresh(tc.getSelectedNodes()); } private void setCurrentDifference(int idx) { synchronized(VIEW_LOCK) { if(diffView == null) { return; } diffView.setLocation(DiffController.DiffPane.Modified, DiffController.LocationType.DifferenceIndex, idx); tc.refreshNavigationButtons(diffView.getDifferenceIndex(), diffView.getDifferenceCount()); } } private class LHStreamSource extends StreamSource { private final FileObject file; private final String title; private final String mimeType; private final boolean editable; public LHStreamSource(FileObject file, String title, String mimeType, boolean editable) { this.file = file; this.title = title; this.mimeType = mimeType; this.editable = editable; } @Override public boolean isEditable() { return editable && isPrimary(file); } private boolean isPrimary(FileObject fo) { if (fo != null) { try { DataObject dao = DataObject.find(fo); return fo.equals(dao.getPrimaryFile()); } catch (DataObjectNotFoundException e) { // no dataobject, never mind } } return true; } @Override public Lookup getLookup() { if (file != null && isPrimary(file)) { return Lookups.fixed(file); } else { return Lookups.fixed(); } } @Override public String getName() { return title; } @Override public String getTitle() { return title; } @Override public String getMIMEType() { return mimeType; } @Override public Reader createReader() throws IOException { if(file != null) { return new InputStreamReader(file.getInputStream()); } return new StringReader(""); // NOI18N } @Override public Writer createWriter(Difference[] conflicts) throws IOException { return null; } } private abstract class DiffTask implements Runnable, Cancellable { private Task task = null; private boolean cancelled = false; private PreparingDiffHandler preparingDiffPanel; void startPrepareProgress() { preparingDiffPanel = new PreparingDiffHandler(); preparingDiffPanel.startPrepareProgress(); } void finishPrepareProgress() { preparingDiffPanel.finishPrepareProgress(); } @Override public synchronized boolean cancel() { cancelled = true; if(preparingDiffPanel != null) { preparingDiffPanel.finishPrepareProgress(); } if(task != null) { task.cancel(); } History.LOG.finer("cancelling DiffTask"); // NOI18N return true; } synchronized void schedule() { task = History.getInstance().getRequestProcessor().create(this); 
task.schedule(500); } synchronized protected boolean isCancelled() { if(cancelled) { History.LOG.finer("DiffTask is cancelled"); // NOI18N } return cancelled; } private class PreparingDiffHandler extends JPanel implements ActionListener { private JLabel label = new JLabel(); private Component progressComponent; private ProgressHandle handle; private final Timer timer = new Timer(0, this); private final Object TIMER_LOCK = new Object(); public PreparingDiffHandler() { label.setText(NbBundle.getMessage(HistoryDiffView.class, "LBL_PreparingDiff")); // NOI18N this.setBackground(UIManager.getColor("TextArea.background")); // NOI18N setLayout(new GridBagLayout()); GridBagConstraints c = new GridBagConstraints(); add(label, c); label.setEnabled(false); timer.setRepeats(false); } @Override public void actionPerformed(ActionEvent e) { if(isCancelled()) { return; } synchronized(TIMER_LOCK) { handle = ProgressHandleFactory.createHandle(NbBundle.getMessage(HistoryDiffView.class, "LBL_PreparingDiff")); // NOI18N setProgressComponent(ProgressHandleFactory.createProgressComponent(handle)); handle.start(); handle.switchToIndeterminate(); setDiffComponent(PreparingDiffHandler.this); } } void startPrepareProgress() { History.LOG.fine("starting prepare diff handler"); // NOI18N synchronized(TIMER_LOCK) { timer.start(); } } void finishPrepareProgress() { History.LOG.fine("finishing prepare diff handler"); // NOI18N synchronized(TIMER_LOCK) { timer.stop(); if(handle != null) { handle.finish(); } } } private void setProgressComponent(Component component) { if(progressComponent != null) remove(progressComponent); if(component != null) { this.progressComponent = component; GridBagConstraints gridBagConstraints = new java.awt.GridBagConstraints(); gridBagConstraints.insets = new java.awt.Insets(0, 5, 0, 0); add(component, gridBagConstraints); } } } } }
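The class above ultimately funnels every comparison through the NetBeans Diff API: prepareDiffView wraps the two revisions in StreamSource objects, hands them to DiffController.createEnhanced, and embeds the controller's JComponent in the panel. A minimal sketch of that core pattern outside the History module follows; it assumes the NetBeans Diff API module is on the classpath, and the titles and contents are made up.

import java.io.IOException;
import java.io.StringReader;
import javax.swing.JComponent;
import org.netbeans.api.diff.DiffController;
import org.netbeans.api.diff.StreamSource;

public class DiffControllerSketch {

    /** Builds a diff component for two in-memory revisions. */
    public static JComponent createDiff() throws IOException {
        StreamSource left = StreamSource.createSource(
                "revision-a", "Revision A", "text/plain", new StringReader("hello\nworld\n"));
        StreamSource right = StreamSource.createSource(
                "revision-b", "Revision B", "text/plain", new StringReader("hello\nthere\n"));

        // Same call HistoryDiffView uses in prepareDiffView(); a listener for
        // DiffController.PROP_DIFFERENCES would normally be attached here too.
        DiffController controller = DiffController.createEnhanced(left, right);
        return controller.getJComponent();
    }
}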
14,319
4,054
# Generated by Django 2.2.24 on 2021-07-08 18:40 from django.db import migrations, models import django.db.models.deletion import readthedocs.projects.validators import uuid class Migration(migrations.Migration): initial = True dependencies = [ ('organizations', '0001_squashed'), ] operations = [ migrations.CreateModel( name='SSODomain', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('domain', models.CharField(max_length=128, unique=True, validators=[readthedocs.projects.validators.DomainNameValidator()])), ], ), migrations.CreateModel( name='SSOIntegration', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=128, null=True)), ('token', models.UUIDField(default=uuid.uuid4, unique=True)), ('organization', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='organizations.Organization')), ('provider', models.CharField(choices=[('allauth', 'AllAuth'), ('email', 'Email')], max_length=32)), ('domains', models.ManyToManyField(blank=True, related_name='ssointegrations', to='sso.SSODomain')), ], ), ]
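For readers who find migrations hard to scan, this is roughly the model layer the initial migration above creates. The field names and options are taken directly from the operations; the Organization import path and anything beyond the fields (Meta options, managers, methods) are assumptions, not the real readthedocs code.

import uuid

from django.db import models

from readthedocs.organizations.models import Organization  # assumed import path
from readthedocs.projects.validators import DomainNameValidator


class SSODomain(models.Model):
    # Unique, validated domain name an SSO integration can claim.
    domain = models.CharField(
        max_length=128, unique=True, validators=[DomainNameValidator()],
    )


class SSOIntegration(models.Model):
    PROVIDER_CHOICES = (("allauth", "AllAuth"), ("email", "Email"))

    name = models.CharField(max_length=128, blank=True, null=True)
    token = models.UUIDField(default=uuid.uuid4, unique=True)
    organization = models.OneToOneField(Organization, on_delete=models.CASCADE)
    provider = models.CharField(choices=PROVIDER_CHOICES, max_length=32)
    domains = models.ManyToManyField(
        SSODomain, blank=True, related_name="ssointegrations",
    )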
643
476
<gh_stars>100-1000 /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.metadata; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.primitives.Primitives; import io.airlift.slice.Slice; import io.hetu.core.transport.block.BlockSerdeUtil; import io.prestosql.spi.block.Block; import io.prestosql.spi.connector.QualifiedObjectName; import io.prestosql.spi.function.BuiltInScalarFunctionImplementation; import io.prestosql.spi.function.Signature; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.FunctionType; import io.prestosql.spi.type.MapType; import io.prestosql.spi.type.RowType; import io.prestosql.spi.type.Type; import io.prestosql.spi.type.TypeSignature; import io.prestosql.spi.type.VarcharType; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.util.Set; import static com.google.common.base.Preconditions.checkArgument; import static io.prestosql.spi.connector.CatalogSchemaName.DEFAULT_NAMESPACE; import static io.prestosql.spi.function.BuiltInScalarFunctionImplementation.ArgumentProperty.valueTypeArgumentProperty; import static io.prestosql.spi.function.BuiltInScalarFunctionImplementation.NullConvention.RETURN_NULL_ON_NULL; import static io.prestosql.spi.function.FunctionKind.SCALAR; import static io.prestosql.spi.type.BigintType.BIGINT; import static io.prestosql.spi.type.BooleanType.BOOLEAN; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.TypeSignature.parseTypeSignature; import static io.prestosql.spi.type.VarbinaryType.VARBINARY; public class LiteralFunction extends SqlScalarFunction { static final String LITERAL_FUNCTION_NAME = "$literal$"; private static final Set<Class<?>> SUPPORTED_LITERAL_TYPES = ImmutableSet.of(long.class, double.class, Slice.class, boolean.class); public LiteralFunction() { super(new Signature(QualifiedObjectName.valueOf(DEFAULT_NAMESPACE, LITERAL_FUNCTION_NAME), SCALAR, parseTypeSignature("R"), parseTypeSignature("T"))); } @Override public boolean isHidden() { return true; } @Override public boolean isDeterministic() { return true; } @Override public String getDescription() { return "literal"; } @Override public BuiltInScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, FunctionAndTypeManager functionAndTypeManager) { Type parameterType = boundVariables.getTypeVariable("T"); Type type = boundVariables.getTypeVariable("R"); MethodHandle methodHandle = null; if (parameterType.getJavaType() == type.getJavaType()) { methodHandle = MethodHandles.identity(parameterType.getJavaType()); } if (parameterType.getJavaType() == Slice.class) { if (type.getJavaType() == Block.class) { methodHandle = BlockSerdeUtil.READ_BLOCK.bindTo(functionAndTypeManager.getBlockEncodingSerde()); } } checkArgument(methodHandle != null, "Expected type %s to use (or can be converted into) Java type %s, but Java type is %s", type, parameterType.getJavaType(), type.getJavaType()); return new 
BuiltInScalarFunctionImplementation( false, ImmutableList.of(valueTypeArgumentProperty(RETURN_NULL_ON_NULL)), methodHandle); } public static boolean isSupportedLiteralType(Type type) { if (type instanceof FunctionType) { // FunctionType contains compiled lambda thus not serializable. return false; } if (type instanceof ArrayType) { return isSupportedLiteralType(((ArrayType) type).getElementType()); } else if (type instanceof RowType) { RowType rowType = (RowType) type; return rowType.getTypeParameters().stream() .allMatch(LiteralFunction::isSupportedLiteralType); } else if (type instanceof MapType) { MapType mapType = (MapType) type; return isSupportedLiteralType(mapType.getKeyType()) && isSupportedLiteralType(mapType.getValueType()); } return SUPPORTED_LITERAL_TYPES.contains(type.getJavaType()); } public static long estimatedSizeInBytes(Object object) { if (object == null) { return 1; } Class<?> javaType = object.getClass(); if (javaType == Long.class) { return Long.BYTES; } else if (javaType == Double.class) { return Double.BYTES; } else if (javaType == Boolean.class) { return 1; } else if (object instanceof Block) { return ((Block) object).getSizeInBytes(); } else if (object instanceof Slice) { return ((Slice) object).length(); } // unknown for rest of types return Integer.MAX_VALUE; } public static Signature getLiteralFunctionSignature(Type type) { TypeSignature argumentType = typeForLiteralFunctionArgument(type).getTypeSignature(); return new Signature( QualifiedObjectName.valueOf(DEFAULT_NAMESPACE, LITERAL_FUNCTION_NAME + type.getTypeSignature()), SCALAR, type.getTypeSignature(), argumentType); } public static Type typeForLiteralFunctionArgument(Type type) { Class<?> clazz = type.getJavaType(); clazz = Primitives.unwrap(clazz); if (clazz == long.class) { return BIGINT; } if (clazz == double.class) { return DOUBLE; } if (!clazz.isPrimitive()) { if (type instanceof VarcharType) { return type; } else { return VARBINARY; } } if (clazz == boolean.class) { return BOOLEAN; } throw new IllegalArgumentException("Unhandled Java type: " + clazz.getName()); } }
2,781
5,169
{
  "name": "Gridy",
  "version": "0.1.0",
  "summary": "Gridy performs all the calculations related to grid layout.",
  "description": "There are many situations where you need to manually calculate a 2D grid layout (suppose you're writing a game like chess or 2048). Gridy performs all the necessary calculations so you don't have to. It's written in Swift and is highly customizable yet easy to use, because it provides reasonable defaults for unused parameters. Gridy is also covered by an XCTest test suite.",
  "homepage": "https://github.com/alexdoloz/Gridy",
  "license": {
    "type": "MIT",
    "file": "LICENSE"
  },
  "authors": {
    "<NAME>": "<EMAIL>"
  },
  "source": {
    "git": "https://github.com/alexdoloz/Gridy.git",
    "tag": "0.1.0"
  },
  "platforms": {
    "ios": "8.0"
  },
  "source_files": "Pod/Classes/**/*",
  "pod_target_xcconfig": {
    "ENABLE_BITCODE": "NO",
    "OTHER_LDFLAGS": "-weak-lswiftXCTest",
    "FRAMEWORK_SEARCH_PATHS": "$(inherited) \"$(PLATFORM_DIR)/Developer/Library/Frameworks\""
  }
}
378
849
#!/usr/bin/env python3 """ Copyright 2020 The Magma Authors. This source code is licensed under the BSD-style license found in the LICENSE file in the root directory of this source tree. Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import argparse import logging from integ_tests.s1aptests.ovs.rest_api import ( add_flowentry, delete_flowentry, get_datapath, get_flows, ) from scapy.all import IP, Ether, sendp logging.getLogger("scapy.runtime").setLevel(logging.ERROR) DEFAULT_PKT_MAC_SRC = "00:00:00:00:00:01" DEFAULT_PKT_MAC_DST = "12:99:cc:97:47:4e" def _simple_remove(args): del_old = {"dpid": datapath, "priority": args.priority} if args.cookie: del_old["cookie"] = args.cookie if args.table_id: del_old["table_id"] = args.table_id print(del_old) delete_flowentry(del_old) def _simple_get(args): query = {"table_id": args.table_id} flows = get_flows(datapath, query) print("FlowEntry Match : captured packets") for flowentry in flows: print("Prior ", flowentry["priority"], end=' - ') print(flowentry["match"], flowentry["packet_count"], sep=', pkts: ') def _simple_add(args): fields = { "dpid": datapath, "table_id": args.table_start, "priority": args.priority, "instructions": [{"type": "GOTO_TABLE", "table_id": args.table_end}], } if args.cookie: fields["cookie"] = args.cookie if args.reg1: reg1 = int(args.reg1, 0) fields["instructions"].append({ "type": "APPLY_ACTIONS", "actions": [{ "type": "SET_FIELD", "field": "reg1", "value": reg1, }], }) add_flowentry(fields) def _simple_send(args): eth = Ether(dst=DEFAULT_PKT_MAC_DST, src=DEFAULT_PKT_MAC_SRC) ip = IP(proto=1, src=args.ipv4_src, dst=args.ipv4_dst) pkt = eth / ip print(pkt.show()) sendp(pkt, iface=args.iface, count=args.num) def create_parser(): """ Creates the argparse parser with all the arguments. 
""" parser = argparse.ArgumentParser( description='CLI for testing packet movement through pipelined,\ using RYU REST API & Scapy', formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) # Add subcommands subparsers = parser.add_subparsers(title='subcommands', dest='cmd') parser_dump = subparsers.add_parser('dump', help='Dump packet stats') parser_dump.add_argument('table_id', help='table id to print', type=int) parser_send = subparsers.add_parser('send', help='Send packets') parser_send.add_argument('iface', help='iface to send to') parser_send.add_argument('-ipd', '--ipv4_dst', help='ipv4 dst for pkt') parser_send.add_argument('-ips', '--ipv4_src', help='ipv4 src for pkt') parser_send.add_argument( '-n', '--num', help='number of packets to send', default=5, type=int, ) parser_skip = subparsers.add_parser('skip', help='Add flowentry') parser_skip.add_argument( 'table_start', type=int, help='table to insert flowentry', ) parser_skip.add_argument( 'table_end', type=int, help='table to forward to', ) parser_skip.add_argument( '-c', '--cookie', default=0, type=int, help='flowentry cookie value', ) parser_skip.add_argument('-r1', '--reg1', help='flowentry reg1 value') parser_skip.add_argument( '-p', '--priority', help='flowentry priority', type=int, default=65535, ) parser_rem = subparsers.add_parser('rem', help='Remove flowentry') parser_rem.add_argument( '-tid', '--table_id', type=int, help='table to remove flowentry from', ) parser_rem.add_argument( '-p', '--priority', default=65535, type=int, help='rm flowentry matching priority value', ) parser_rem.add_argument( '-c', '--cookie', help='rm flowentry matching cookie value', ) # Add function callbacks parser_dump.set_defaults(func=_simple_get) parser_send.set_defaults(func=_simple_send) parser_skip.set_defaults(func=_simple_add) parser_rem.set_defaults(func=_simple_remove) return parser def main(): global datapath datapath = get_datapath() if not datapath: print("Coudn't get datapath") exit(1) parser = create_parser() # Parse the args args = parser.parse_args() if not args.cmd: parser.print_usage() exit(1) # Execute the subcommand function args.func(args) if __name__ == "__main__": main()
2,111
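The subcommands wired up in create_parser() above map onto invocations like the ones below. This snippet only exercises argument parsing (the interface name, IPs, table ids, and cookie values are made up); in the script itself each parsed namespace is dispatched through args.func(args) after the datapath has been resolved.

# Run in the same module (or import create_parser from it); parsing alone does
# not touch OVS, scapy, or the RYU REST API.
parser = create_parser()

example_invocations = [
    ["dump", "0"],                                               # print flow stats for table 0
    ["send", "eth0", "-ips", "10.0.2.1", "-ipd", "192.168.128.11", "-n", "3"],
    ["skip", "0", "13", "-c", "42", "-p", "100"],                # GOTO_TABLE 13 from table 0
    ["rem", "-tid", "0", "-p", "100", "-c", "42"],
]

for argv in example_invocations:
    args = parser.parse_args(argv)
    print(args.cmd, "->", args.func.__name__)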
544
from d3m import container, utils as d3m_utils from d3m.base import utils as base_utils from d3m.metadata import base as metadata_base, hyperparams from d3m.primitive_interfaces import base, transformer from d3m.primitive_interfaces.base import CallResult, DockerContainer from typing import cast, Dict, List, Union, Sequence, Optional, Tuple from collections import OrderedDict from scipy import sparse import uuid import nimfa import pandas as pd import numpy from numpy import ndarray import warnings from ..common.TODSBasePrimitives import TODSTransformerPrimitiveBase __all__ = ('NonNegativeMatrixFactorizationPrimitive',) Inputs = container.DataFrame Outputs = container.DataFrame class Hyperparams(hyperparams.Hyperparams): rank = hyperparams.Hyperparameter[int]( default=30, semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="The factorization rank to achieve. Default is 30.", ) seed = hyperparams.Enumeration( values=['nndsvd','random_c','random_vcol','random','fixed'], default='random', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="""Method to seed the computation of a factorization""", ) W = hyperparams.Union( configuration=OrderedDict({ 'ndarray': hyperparams.Hyperparameter[ndarray]( default=numpy.array([]), semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter'], ), 'none': hyperparams.Constant( default=None, semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter'], ) }), default='none', description='Score weight by dimensions. If None, [1,1,...,1] will be used.', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'] ) H = hyperparams.Union( configuration=OrderedDict({ 'ndarray': hyperparams.Hyperparameter[ndarray]( default=numpy.array([]), semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter'], ), 'none': hyperparams.Constant( default=None, semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter'], ) }), default='none', description='Score weight by dimensions. If None, [1,1,...,1] will be used.', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'] ) update = hyperparams.Enumeration( values=['euclidean','divergence'], default='euclidean', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="""Type of update equations used in factorization. When specifying model parameter update can be assigned to:" 'euclidean' for classic Euclidean distance update equations," 'divergence' for divergence update equations." By default Euclidean update equations are used.""", ) objective = hyperparams.Enumeration( values=['fro','div','conn'], default='fro', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="""Type of objective function used in factorization. When specifying model parameter :param:`objective` can be assigned to: ‘fro’ for standard Frobenius distance cost function, ‘div’ for divergence of target matrix from NMF estimate cost function (KL), ‘conn’ for measuring the number of consecutive iterations in which the connectivity matrix has not changed. By default the standard Frobenius distance cost function is used.""", ) max_iter = hyperparams.Hyperparameter[int]( default=30, semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="Maximum number of factorization iterations. 
Note that the number of iterations depends on the speed of method convergence. Default is 30.", ) learning_rate = hyperparams.Union[Union[float, None]]( configuration=OrderedDict( limit=hyperparams.Bounded[float]( lower=0, upper=None, default=0.01, ), unlimited=hyperparams.Constant( default=None, description='If nothing is give as a paramter', ), ), default='unlimited', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="Minimal required improvement of the residuals from the previous iteration. They are computed between the target matrix and its MF estimate using the objective function associated to the MF algorithm. Default is None.", ) # parameters for column use_columns = hyperparams.Set( elements=hyperparams.Hyperparameter[int](-1), default=(2,3), semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="A set of column indices to force primitive to operate on. If any specified column cannot be parsed, it is skipped.", ) exclude_columns = hyperparams.Set( elements=hyperparams.Hyperparameter[int](-1), default=(), semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="A set of column indices to not operate on. Applicable only if \"use_columns\" is not provided.", ) return_result = hyperparams.Enumeration( values=['append', 'replace', 'new'], default='new', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="Should parsed columns be appended, should they replace original columns, or should only parsed columns be returned? This hyperparam is ignored if use_semantic_types is set to false.", ) use_semantic_types = hyperparams.UniformBool( default=False, semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="Controls whether semantic_types metadata will be used for filtering columns in input dataframe. Setting this to false makes the code ignore return_result and will produce only the output dataframe" ) add_index_columns = hyperparams.UniformBool( default=False, semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="Also include primary index columns if input data has them. Applicable only if \"return_result\" is set to \"new\".", ) error_on_no_input = hyperparams.UniformBool( default=True, semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'], description="Throw an exception if no input column is selected/provided. Defaults to true to behave like sklearn. 
To prevent pipelines from breaking set this to False.", ) return_semantic_type = hyperparams.Enumeration[str]( values=['https://metadata.datadrivendiscovery.org/types/Attribute', 'https://metadata.datadrivendiscovery.org/types/ConstructedAttribute'], default='https://metadata.datadrivendiscovery.org/types/Attribute', description='Decides what semantic type to attach to generated attributes', semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'] ) class NMF: def __init__(self, rank,W,H,seed,update,objective,max_iter,learning_rate): self._rank = rank self._seed = seed self._W = W, self._H = H, self._update = update self._objective = objective self._max_iter = max_iter self._learning_rate = learning_rate def produce(self, inputs): warnings.filterwarnings("ignore") # for removing warnings thrown by nimfa # for testing # a = numpy.array([[1,0,1,0,1],[1,0,1,0,1],[1,0,1,0,1]]) # b = numpy.array([[1,0],[1,0],[1,0],[1,0],[1,0]]) # print(type(a)) # print(type(self._W[0])) nmf = nimfa.Nmf(V = numpy.array(inputs.values), seed=self._seed, W=self._W[0], H=self._H[0], rank=self._rank, update = self._update, objective=self._objective, min_residuals=self._learning_rate ) nmf_fit = nmf() W = nmf_fit.basis() H = nmf_fit.coef() column_names = ['row_latent_vector_'+str(i) for i in range(self._rank)] W = pd.DataFrame(data = W,columns = column_names) # print(type(W)) #TODO: Column latent vector column_names = ['column_latent_vector_'+str(i) for i in range(inputs.shape[1])] H = pd.DataFrame(data = H,columns = column_names) W.reset_index(drop=True, inplace=True) H.reset_index(drop=True, inplace=True) result = pd.concat([W, H], axis=1) # print(result.head(10)) return result class NonNegativeMatrixFactorizationPrimitive(TODSTransformerPrimitiveBase[Inputs, Outputs, Hyperparams]): """ Calculates Latent factors of a given matrix of timeseries data Parameters ---------- rank: int The factorization rank to achieve. Default is 30. update: str Type of update equations used in factorization. When specifying model parameter update can be assigned to:" 'euclidean' for classic Euclidean distance update equations," 'divergence' for divergence update equations." By default Euclidean update equations are used. objective: str Type of objective function used in factorization. When specifying model parameter :param:`objective` can be assigned to: ‘fro’ for standard Frobenius distance cost function, ‘div’ for divergence of target matrix from NMF estimate cost function (KL), ‘conn’ for measuring the number of consecutive iterations in which the connectivity matrix has not changed. By default the standard Frobenius distance cost function is used. max_iter: int Maximum number of factorization iterations. Note that the number of iterations depends on the speed of method convergence. Default is 30. learning_rate: float Minimal required improvement of the residuals from the previous iteration. They are computed between the target matrix and its MF estimate using the objective function associated to the MF algorithm. Default is None. use_columns: Set A set of column indices to force primitive to operate on. If any specified column cannot be parsed, it is skipped. exclude_columns: Set A set of column indices to not operate on. Applicable only if \"use_columns\" is not provided. return_result: Enumeration Should parsed columns be appended, should they replace original columns, or should only parsed columns be returned? This hyperparam is ignored if use_semantic_types is set to false. 
use_semantic_types: Bool Controls whether semantic_types metadata will be used for filtering columns in input dataframe. Setting this to false makes the code ignore return_result and will produce only the output dataframe. add_index_columns: Bool Also include primary index columns if input data has them. Applicable only if \"return_result\" is set to \"new\". error_on_no_input: Bool( Throw an exception if no input column is selected/provided. Defaults to true to behave like sklearn. To prevent pipelines from breaking set this to False. return_semantic_type: Enumeration[str]( Decides what semantic type to attach to generated attributes' """ metadata = metadata_base.PrimitiveMetadata({ '__author__' : "DATA Lab @ Texas A&M University", 'name': "<NAME>", 'python_path': 'd3m.primitives.tods.feature_analysis.non_negative_matrix_factorization', 'source': { 'name': 'DATA Lab @ Texas A&M University', 'contact': 'mailto:<EMAIL>', }, 'hyperparameters_to_tune':['rank','update','objective','max_iter','learning_rate'], 'version': '0.0.1', 'algorithm_types': [ metadata_base.PrimitiveAlgorithmType.TODS_PRIMITIVE, ], 'primitive_family': metadata_base.PrimitiveFamily.FEATURE_CONSTRUCTION, 'id': str(uuid.uuid3(uuid.NAMESPACE_DNS, 'NonNegativeMatrixFactorizationPrimitive')), }) def __init__(self, *, hyperparams: Hyperparams) -> None: super().__init__(hyperparams=hyperparams) self._clf = NMF(rank=self.hyperparams['rank'], seed=self.hyperparams['seed'], W=self.hyperparams['W'], H=self.hyperparams['H'], objective=self.hyperparams['objective'], update=self.hyperparams['update'], max_iter=self.hyperparams['max_iter'], learning_rate = self.hyperparams['learning_rate'], ) def _produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> base.CallResult[Outputs]: assert isinstance(inputs, container.DataFrame), type(dataframe) self._fitted = False self._training_inputs, self._training_indices = self._get_columns_to_fit(inputs, self.hyperparams) self._input_column_names = self._training_inputs.columns if len(self._training_indices) > 0: # self._clf.fit(self._training_inputs) self._fitted = True else: if self.hyperparams['error_on_no_input']: raise RuntimeError("No input columns were selected") self.logger.warn("No input columns were selected") if not self._fitted: raise PrimitiveNotFittedError("Primitive not fitted.") sk_inputs = inputs if self.hyperparams['use_semantic_types']: cols = [inputs.columns[x] for x in self._training_indices] sk_inputs = container.DataFrame(data = inputs.iloc[:, self._training_indices].values,columns = cols, generate_metadata=True) output_columns = [] if len(self._training_indices) > 0: sk_output = self._clf.produce(sk_inputs) if sparse.issparse(sk_output): sk_output = sk_output.toarray() outputs = self._wrap_predictions(inputs, sk_output) # if len(outputs.columns) == len(self._input_column_names): # outputs.columns = self._input_column_names output_columns = [outputs] else: if self.hyperparams['error_on_no_input']: raise RuntimeError("No input columns were selected") self.logger.warn("No input columns were selected") outputs = base_utils.combine_columns(return_result=self.hyperparams['return_result'], add_index_columns=self.hyperparams['add_index_columns'], inputs=inputs, column_indices=self._training_indices, columns_list=output_columns) return base.CallResult(outputs) @classmethod def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams): """ Select columns to fit. 
Args: inputs: Container DataFrame hyperparams: d3m.metadata.hyperparams.Hyperparams Returns: list """ if not hyperparams['use_semantic_types']: return inputs, list(range(len(inputs.columns))) # return inputs, list(hyperparams['use_columns']) inputs_metadata = inputs.metadata def can_produce_column(column_index: int) -> bool: return cls._can_produce_column(inputs_metadata, column_index, hyperparams) columns_to_produce, columns_not_to_produce = base_utils.get_columns_to_use(inputs_metadata, use_columns=hyperparams['use_columns'], exclude_columns=hyperparams['exclude_columns'], can_use_column=can_produce_column) return inputs.iloc[:, columns_to_produce], columns_to_produce # return columns_to_produce @classmethod def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int, hyperparams: Hyperparams) -> bool: """ Output whether a column can be processed. Args: inputs_metadata: d3m.metadata.base.DataMetadata column_index: int Returns: bool """ column_metadata = inputs_metadata.query((metadata_base.ALL_ELEMENTS, column_index)) accepted_structural_types = (int, float, numpy.integer, numpy.float64,str) accepted_semantic_types = set() accepted_semantic_types.add("https://metadata.datadrivendiscovery.org/types/Attribute") if not issubclass(column_metadata['structural_type'], accepted_structural_types): print(column_index, "does not match the structural_type requirements in metadata. Skipping column") return False semantic_types = set(column_metadata.get('semantic_types', [])) # print("length sematic type",len(semantic_types)) # returing true for testing purposes for custom dataframes return True; if len(semantic_types) == 0: cls.logger.warning("No semantic types found in column metadata") return False # Making sure all accepted_semantic_types are available in semantic_types if len(accepted_semantic_types - semantic_types) == 0: return True # print(semantic_types) return False @classmethod def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]: # pragma: no cover """ Output metadata of selected columns. Args: outputs_metadata: metadata_base.DataMetadata hyperparams: d3m.metadata.hyperparams.Hyperparams Returns: d3m.metadata.base.DataMetadata """ outputs_length = outputs_metadata.query((metadata_base.ALL_ELEMENTS,))['dimension']['length'] target_columns_metadata: List[OrderedDict] = [] for column_index in range(outputs_length): column_metadata = OrderedDict(outputs_metadata.query_column(column_index)) # Update semantic types and prepare it for predicted targets. semantic_types = set(column_metadata.get('semantic_types', [])) semantic_types_to_remove = set([]) add_semantic_types = [] add_semantic_types.add(hyperparams["return_semantic_type"]) semantic_types = semantic_types - semantic_types_to_remove semantic_types = semantic_types.union(add_semantic_types) column_metadata['semantic_types'] = list(semantic_types) target_columns_metadata.append(column_metadata) return target_columns_metadata @classmethod def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs], target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata: """ Updata metadata for selected columns. 
Args: inputs_metadata: metadata_base.DataMetadata outputs: Container Dataframe target_columns_metadata: list Returns: d3m.metadata.base.DataMetadata """ outputs_metadata = metadata_base.DataMetadata().generate(value=outputs) for column_index, column_metadata in enumerate(target_columns_metadata): column_metadata.pop("structural_type", None) outputs_metadata = outputs_metadata.update_column(column_index, column_metadata) return outputs_metadata def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs: """ Wrap predictions into dataframe Args: inputs: Container Dataframe predictions: array-like data (n_samples, n_features) Returns: Dataframe """ outputs = container.DataFrame(predictions, generate_metadata=True) target_columns_metadata = self._add_target_columns_metadata(outputs.metadata,self.hyperparams) outputs.metadata = self._update_predictions_metadata(inputs.metadata, outputs, target_columns_metadata) # print(outputs.metadata.to_internal_simple_structure()) return outputs @classmethod def _add_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams): """ Add target columns metadata Args: outputs_metadata: metadata.base.DataMetadata hyperparams: d3m.metadata.hyperparams.Hyperparams Returns: List[OrderedDict] """ outputs_length = outputs_metadata.query((metadata_base.ALL_ELEMENTS,))['dimension']['length'] target_columns_metadata: List[OrderedDict] = [] for column_index in range(outputs_length): # column_name = "output_{}".format(column_index) column_metadata = OrderedDict() semantic_types = set() semantic_types.add(hyperparams["return_semantic_type"]) column_metadata['semantic_types'] = list(semantic_types) # column_metadata["name"] = str(column_name) target_columns_metadata.append(column_metadata) return target_columns_metadata NonNegativeMatrixFactorizationPrimitive.__doc__ = NonNegativeMatrixFactorizationPrimitive.__doc__
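Stripped of the d3m column-selection and metadata plumbing, the core of this primitive is the nimfa call inside NMF.produce above. A standalone sketch with synthetic data, mirroring the primitive's default hyperparameters (rank, seed, update, objective, max_iter):

# Requires nimfa, numpy and pandas; the input must be a non-negative matrix.
import nimfa
import numpy as np
import pandas as pd

rank = 3
V = np.abs(np.random.rand(20, 5))

nmf = nimfa.Nmf(
    V=V,
    seed="random",        # default for the `seed` hyperparameter
    rank=rank,
    update="euclidean",   # default for `update`
    objective="fro",      # default for `objective`
    max_iter=30,          # default for `max_iter`
)
fit = nmf()

# Same post-processing as NMF.produce: W holds row latent vectors, H column latent vectors.
W = pd.DataFrame(fit.basis(), columns=[f"row_latent_vector_{i}" for i in range(rank)])
H = pd.DataFrame(fit.coef(), columns=[f"column_latent_vector_{i}" for i in range(V.shape[1])])
print(W.shape, H.shape)   # (20, 3) and (3, 5)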
6,802
30,023
<gh_stars>1000+ """Tests for the sleepiq component."""
19
1,270
<filename>apps/remix-ide/manifest.json<gh_stars>1000+ { "name": "Solidity", "description": "Realtime compiler and runtime", "update_url": "https://remix.ethereum.org", "version": "1.1", "manifest_version": 2, "background": { "scripts": ["background.js"], "persistent": true }, "icons": { "32": "icon.png" }, "browser_action": { "default_icon": "icon.png" }, "permissions": [ "storage", "tabs", "activeTab", "https://ajax.googleapis.com/", "webRequest", "webRequestBlocking", "<all_urls>" ], "content_security_policy": "script-src 'self' https://binaries.soliditylang.org/; object-src 'self'" }
274
12,718
/*===---- clflushoptintrin.h - CLFLUSHOPT intrinsic ------------------------=== * * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. * See https://llvm.org/LICENSE.txt for license information. * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception * *===-----------------------------------------------------------------------=== */ #ifndef __IMMINTRIN_H #error "Never use <clflushoptintrin.h> directly; include <immintrin.h> instead." #endif #ifndef __CLFLUSHOPTINTRIN_H #define __CLFLUSHOPTINTRIN_H /* Define the default attributes for the functions in this file. */ #define __DEFAULT_FN_ATTRS __attribute__((__always_inline__, __nodebug__, __target__("clflushopt"))) static __inline__ void __DEFAULT_FN_ATTRS _mm_clflushopt(void const * __m) { __builtin_ia32_clflushopt(__m); } #undef __DEFAULT_FN_ATTRS #endif
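A minimal usage sketch for the intrinsic declared above. The target attribute means callers must compile with CLFLUSHOPT enabled (for example -mclflushopt, or an -march that implies it); pairing the flush with an sfence is a common pattern but is an illustration here, not something this header mandates.

/* Build (assumption): cc -O2 -mclflushopt flush_example.c */
#include <immintrin.h>   /* never include clflushoptintrin.h directly */
#include <stdio.h>

int main(void) {
    static int buffer[1024];

    buffer[0] = 42;

    /* Write the cache line holding buffer[0] back to memory and invalidate it. */
    _mm_clflushopt(&buffer[0]);

    /* CLFLUSHOPT is weakly ordered with respect to other flushes and stores,
     * so code that needs ordering typically follows it with a fence. */
    _mm_sfence();

    printf("%d\n", buffer[0]);
    return 0;
}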
290
852
#include "CondFormats/DataRecord/interface/SiPixelVCalRcd.h" #include "FWCore/Framework/interface/eventsetuprecord_registration_macro.h" EVENTSETUP_RECORD_REG(SiPixelVCalRcd);
66
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.search.documents.test.environment.models; import com.fasterxml.jackson.annotation.JsonProperty; public class Bucket { @JsonProperty(value = "BucketName") public String bucketName; @JsonProperty(value = "Count") public int count; public Bucket bucketName(String bucketName) { this.bucketName = bucketName; return this; } public Bucket count(int count) { this.count = count; return this; } }
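The fluent setters above are meant to be chained; the Jackson round-trip below illustrates what the @JsonProperty names produce. Using an ObjectMapper directly is an assumption about how the test environment consumes this model (jackson-databind on the classpath), not something the class itself requires.

import com.azure.search.documents.test.environment.models.Bucket;
import com.fasterxml.jackson.databind.ObjectMapper;

public class BucketExample {
    public static void main(String[] args) throws Exception {
        Bucket bucket = new Bucket()
                .bucketName("budget-hotels")
                .count(12);

        // Public fields are serialized under their @JsonProperty names.
        String json = new ObjectMapper().writeValueAsString(bucket);
        System.out.println(json);   // {"BucketName":"budget-hotels","Count":12}
    }
}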
202
2,843
<reponame>GggggitHub/LifeHelper
package com.yc.configlayer.arounter;

public interface RouterConfig {

    String SCHEME = "yc";
    String ROUTER_SCHEME = "yc://";
    String ROUTER_HOST = "com.ns.yc.lifehelper";
    String ROUTER_PRE = ROUTER_SCHEME + ROUTER_HOST;
    String PATH_TOP_LEVEL_VIEW = "/view";
    String PATH_TOP_LEVEL_ACTION = "/action";

    /**
     * Main app module
     */
    interface App {
        // Settings center
        String ACTIVITY_APP_SETTING_ACTIVITY = "/app/MeSettingActivity";
    }

    /**
     * WanAndroid module
     */
    interface Android {
        // Navigate to the WanAndroid module home page
        String ACTIVITY_ANDROID_ACTIVITY = "/android/AndroidActivity";
    }

    /**
     * Douban module
     */
    interface DouBan {
        // Navigate to the Douban movies page
        String ACTIVITY_DOU_MOVIE_ACTIVITY = "/dou/DouMovieActivity";
        // Navigate to the Douban music page
        String ACTIVITY_DOU_MUSIC_ACTIVITY = "/dou/DouMusicActivity";
        // Navigate to the Douban books page
        String ACTIVITY_DOU_BOOK_ACTIVITY = "/dou/DouBookActivity";
        // Navigate to the Douban top-rated movies page
        String ACTIVITY_DOU_TOP_ACTIVITY = "/dou/MovieTopActivity";
    }

    /**
     * Game module
     */
    interface Game {
        // Jigsaw puzzle game
        String ACTIVITY_OTHER_PIN_TU_ACTIVITY = "/game/PinTuGameActivity";
        // Plane battle game
        String ACTIVITY_OTHER_AIR_ACTIVITY = "/game/AirGameActivity";
        // Doodle/drawing page
        String ACTIVITY_BOOK_DOODLE_ACTIVITY = "/game/DoodleViewActivity";
        // Slot machine page
        String ACTIVITY_OTHER_MONKEY_ACTIVITY = "/game/MonkeyGameActivity";
    }

    /**
     * Gank module
     */
    interface Gank {
        // My collected Gank articles page
        String ACTIVITY_GANK_KNOWLEDGE_ACTIVITY = "/gank/MyKnowledgeActivity";
        // Gank.io home page
        String ACTIVITY_GANK_ACTIVITY = "/gank/GanKHomeActivity";
    }

    /**
     * Love module
     */
    interface Love {
        // Express affection
        String ACTIVITY_LOVE_ACTIVITY = "/love/LoveGirlMainActivity";
    }

    /**
     * Music module
     */
    interface Music {
        // Music home page
        String ACTIVITY_MUSIC_ACTIVITY = "/music/MusicActivity";
        // Music splash page
        String ACTIVITY_MUSIC_GUIDE_ACTIVITY = "/music/GuideMusicActivity";
    }

    /**
     * Notes module
     */
    interface Note {
        // Rich-text article page
        String ACTIVITY_OTHER_ARTICLE = "/note/NewArticleActivity";
        // Markdown home page
        String ACTIVITY_MARKDOWN_ACTIVITY = "/note/MdMainActivity";
    }

    /**
     * Video module
     */
    interface Video {
        // Navigate to the video page
        String ACTIVITY_VIDEO_VIDEO = "/video/VideoActivity";
    }

    /**
     * Demo module (basic examples)
     */
    interface Demo {
        // Gallery page
        String ACTIVITY_OTHER_GALLERY_ACTIVITY = "/other/ImageGalleryActivity";
        // Large image loading page
        String ACTIVITY_LARGE_IMAGE_ACTIVITY = "/other/ZoomLargeImageActivity";
        // Banner carousel
        String ACTIVITY_OTHER_BANNER_ACTIVITY = "/other/MeBannerActivity";
        // Banner carousel list
        String ACTIVITY_OTHER_BANNER_LIST_ACTIVITY = "/other/BannerViewActivity";
        // SnapHelper carousel
        String ACTIVITY_OTHER_SNAPHELPER_ACTIVITY = "/other/SnapHelperActivity";
        // Antivirus-style progress bar control
        String ACTIVITY_OTHER_PROGRESS1_ACTIVITY = "/other/ProgressFirstActivity";
        // Circular percentage download progress bar custom control
        String ACTIVITY_OTHER_PROGRESS2_ACTIVITY = "/other/ProgressSecondActivity";
        // Linear percentage progress bar custom control
        String ACTIVITY_OTHER_PROGRESS3_ACTIVITY = "/other/ProgressThirdActivity";
        // Navigate to the feedback page
        String ACTIVITY_OTHER_FEEDBACK = "/other/MeFeedBackActivity";
        // Navigate to the about-this-project page
        String ACTIVITY_OTHER_ABOUT_ME = "/other/AboutMeActivity";
        // Navigate to the login page
        String ACTIVITY_LOGIN_ACTIVITY = "/other/MeLoginActivity";
        // Navigate to the register page
        String ACTIVITY_REGISTER_ACTIVITY = "/other/MeRegisterActivity";
        // Navigate to the gallery album page
        String ACTIVITY_COVER_ACTIVITY = "/other/GalleryCoverActivity";
    }

    /**
     * Library module
     */
    interface Library {
        // Navigate to the WebView detail page
        String ACTIVITY_LIBRARY_WEB_VIEW = "/library/WebViewActivity";
    }

    /**
     * NFC module
     */
    interface Nfc {
        // Navigate to the NFC test page
        String ACTIVITY_NFC_MAIN = "/nfc/NfcMainActivity";
    }
}
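These constants are route paths, so call sites resolve them through the router rather than referencing activities directly. A hedged sketch of typical consumption with Alibaba ARouter follows; LifeHelper may wrap this behind its own navigation helper, so treat the exact call chain and class name as assumptions.

import com.alibaba.android.arouter.launcher.ARouter;
import com.yc.configlayer.arounter.RouterConfig;

public final class RouterNavigationSketch {

    private RouterNavigationSketch() {}

    /** Open the Douban movie page registered under its route path. */
    public static void openDoubanMovies() {
        ARouter.getInstance()
                .build(RouterConfig.DouBan.ACTIVITY_DOU_MOVIE_ACTIVITY)
                .navigation();
    }
}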
2,473
686
<gh_stars>100-1000 /* Copyright (C) 1996-1997 Id Software, Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // cl_parse.c -- parse a message received from the server #include "cdaudio.h" #include "client.h" #include "cmd.h" #include "console.h" #include "host.h" #include "model.h" #include "net.h" #include "protocol.h" #include "quakedef.h" #include "sbar.h" #include "screen.h" #include "server.h" #include "sound.h" #include "sys.h" #ifdef GLQUAKE # include "glquake.h" #endif static const char *svc_strings[] = { "svc_bad", "svc_nop", "svc_disconnect", "svc_updatestat", "svc_version", // [long] server version "svc_setview", // [short] entity number "svc_sound", // <see code> "svc_time", // [float] server time "svc_print", // [string] null terminated string "svc_stufftext", // [string] stuffed into client's console buffer // the string should be \n terminated "svc_setangle", // [vec3] set the view angle to this absolute value "svc_serverinfo", // [long] version // [string] signon string // [string]..[0]model cache [string]...[0]sounds cache // [string]..[0]item cache "svc_lightstyle", // [byte] [string] "svc_updatename", // [byte] [string] "svc_updatefrags", // [byte] [short] "svc_clientdata", // <shortbits + data> "svc_stopsound", // <see code> "svc_updatecolors", // [byte] [byte] "svc_particle", // [vec3] <variable> "svc_damage", // [byte] impact [byte] blood [vec3] from "svc_spawnstatic", "OBSOLETE svc_spawnbinary", "svc_spawnbaseline", "svc_temp_entity", // <variable> "svc_setpause", "svc_signonnum", "svc_centerprint", "svc_killedmonster", "svc_foundsecret", "svc_spawnstaticsound", "svc_intermission", "svc_finale", // [string] music [string] text "svc_cdtrack", // [byte] track [byte] looptrack "svc_sellscreen", "svc_cutscene", "", // 35 "", // 36 "svc_fitz_skybox", "", // 38 "", // 39 "svc_fitz_bf", "svc_fitz_fog", "svc_fitz_spawnbaseline2", "svc_fitz_spawnstatic2", "svc_fitz_spawnstaticsound2", "", // 45 "", // 46 "", // 47 "", // 48 "", // 49 }; //============================================================================= /* =============== CL_EntityNum This error checks and tracks the total number of entities =============== */ entity_t * CL_EntityNum(int num) { if (num >= cl.num_entities) { if (num >= MAX_EDICTS) Host_Error("CL_EntityNum: %i is an invalid number", num); while (cl.num_entities <= num) { cl_entities[cl.num_entities].colormap = vid.colormap; cl.num_entities++; } } return &cl_entities[num]; } static int CL_ReadSoundNum(int field_mask) { switch (cl.protocol) { case PROTOCOL_VERSION_NQ: case PROTOCOL_VERSION_BJP: return MSG_ReadByte(); case PROTOCOL_VERSION_BJP2: case PROTOCOL_VERSION_BJP3: return (unsigned short)MSG_ReadShort(); case PROTOCOL_VERSION_FITZ: if (field_mask & SND_FITZ_LARGESOUND) return (unsigned short)MSG_ReadShort(); else return MSG_ReadByte(); default: Host_Error("%s: Unknown protocol version (%d)\n", __func__, cl.protocol); } } /* 
================== CL_ParseStartSoundPacket ================== */ void CL_ParseStartSoundPacket(void) { vec3_t pos; int channel, ent; int sound_num; int volume; int field_mask; float attenuation; int i; field_mask = MSG_ReadByte(); if (field_mask & SND_VOLUME) volume = MSG_ReadByte(); else volume = DEFAULT_SOUND_PACKET_VOLUME; if (field_mask & SND_ATTENUATION) attenuation = MSG_ReadByte() / 64.0; else attenuation = DEFAULT_SOUND_PACKET_ATTENUATION; if (cl.protocol == PROTOCOL_VERSION_FITZ && (field_mask & SND_FITZ_LARGEENTITY)) { ent = (unsigned short)MSG_ReadShort(); channel = MSG_ReadByte(); } else { channel = MSG_ReadShort(); ent = channel >> 3; channel &= 7; } sound_num = CL_ReadSoundNum(field_mask); if (ent > MAX_EDICTS) Host_Error("CL_ParseStartSoundPacket: ent = %i", ent); for (i = 0; i < 3; i++) pos[i] = MSG_ReadCoord(); S_StartSound(ent, channel, cl.sound_precache[sound_num], pos, volume / 255.0, attenuation); } /* ================== CL_KeepaliveMessage When the client is taking a long time to load stuff, send keepalive messages so the server doesn't disconnect. ================== */ void CL_KeepaliveMessage(void) { float time; static float lastmsg; int ret; sizebuf_t old; byte olddata[8192]; if (sv.active) return; // no need if server is local if (cls.demoplayback) return; // read messages from server, should just be nops old = net_message; memcpy(olddata, net_message.data, net_message.cursize); do { ret = CL_GetMessage(); switch (ret) { case 0: break; // nothing waiting case 1: Host_Error("%s: received a message", __func__); case 2: if (MSG_ReadByte() != svc_nop) Host_Error("%s: datagram wasn't a nop", __func__); break; default: Host_Error("%s: CL_GetMessage failed", __func__); } } while (ret); net_message = old; memcpy(net_message.data, olddata, net_message.cursize); // check time time = Sys_DoubleTime(); if (time - lastmsg < 5) return; lastmsg = time; // write out a nop Con_Printf("--> client to server keepalive\n"); MSG_WriteByte(&cls.message, clc_nop); NET_SendMessage(cls.netcon, &cls.message); SZ_Clear(&cls.message); } /* ================== CL_ParseServerInfo ================== */ void CL_ParseServerInfo(void) { char *level; const char *mapname; int i, maxlen; int nummodels, numsounds; char model_precache[MAX_MODELS][MAX_QPATH]; char sound_precache[MAX_SOUNDS][MAX_QPATH]; Con_DPrintf("Serverinfo packet received.\n"); // // wipe the client_state_t struct // CL_ClearState(); // parse protocol version number i = MSG_ReadLong(); if (!Protocol_Known(i)) { Con_Printf("Server returned unknown protocol version %i\n", i); return; } cl.protocol = i; // parse maxclients cl.maxclients = MSG_ReadByte(); if (cl.maxclients < 1 || cl.maxclients > MAX_SCOREBOARD) { Con_Printf("Bad maxclients (%u) from server\n", cl.maxclients); return; } cl.players = Hunk_AllocName(cl.maxclients * sizeof(*cl.players), "players"); // parse gametype cl.gametype = MSG_ReadByte(); // parse signon message level = cl.levelname; maxlen = sizeof(cl.levelname); snprintf(level, maxlen, "%s", MSG_ReadString()); // seperate the printfs so the server message can have a color Con_Printf("\n\n\35\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36" "\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36\36\37\n\n"); Con_Printf("%c%s\n", 2, level); Con_Printf("Using protocol %i\n", cl.protocol); // // first we go through and touch all of the precache data that still // happens to be in the cache, so precaching something else doesn't // needlessly purge it // // precache models memset(cl.model_precache, 0, 
sizeof(cl.model_precache)); for (nummodels = 1;; nummodels++) { char *in, *model; in = MSG_ReadString(); if (!in[0]) break; if (nummodels == max_models(cl.protocol)) { Host_Error("Server sent too many model precaches (max = %d)", max_models(cl.protocol)); return; } model = model_precache[nummodels]; maxlen = sizeof(model_precache[0]); snprintf(model, maxlen, "%s", in); Mod_TouchModel(model); } // precache sounds memset(cl.sound_precache, 0, sizeof(cl.sound_precache)); for (numsounds = 1;; numsounds++) { char *in, *sound; in = MSG_ReadString(); if (!in[0]) break; if (numsounds == max_sounds(cl.protocol)) { Host_Error("Server sent too many sound precaches (max = %d)", max_sounds(cl.protocol)); return; } sound = sound_precache[numsounds]; maxlen = sizeof(sound_precache[0]); snprintf(sound, maxlen, "%s", in); S_TouchSound(sound); } // copy the naked name of the map file to the cl structure mapname = COM_SkipPath(model_precache[1]); COM_StripExtension(mapname, cl.mapname, sizeof(cl.mapname)); // // now we try to load everything else until a cache allocation fails // for (i = 1; i < nummodels; i++) { cl.model_precache[i] = Mod_ForName(model_precache[i], false); if (cl.model_precache[i] == NULL) { Con_Printf("Model %s not found\n", model_precache[i]); return; } CL_KeepaliveMessage(); } S_BeginPrecaching(); for (i = 1; i < numsounds; i++) { cl.sound_precache[i] = S_PrecacheSound(sound_precache[i]); CL_KeepaliveMessage(); } S_EndPrecaching(); // local state cl_entities[0].model = cl.model_precache[1]; cl.worldmodel = BrushModel(cl_entities[0].model); R_NewMap(); Hunk_Check(); // make sure nothing is hurt noclip_anglehack = false; // noclip is turned off at start } static int CL_ReadModelIndex(unsigned int bits) { switch (cl.protocol) { case PROTOCOL_VERSION_NQ: return MSG_ReadByte(); case PROTOCOL_VERSION_BJP: case PROTOCOL_VERSION_BJP2: case PROTOCOL_VERSION_BJP3: return MSG_ReadShort(); case PROTOCOL_VERSION_FITZ: if (bits & B_FITZ_LARGEMODEL) return MSG_ReadShort(); return MSG_ReadByte(); default: Host_Error("%s: Unknown protocol version (%d)\n", __func__, cl.protocol); } } static int CL_ReadModelFrame(unsigned int bits) { switch (cl.protocol) { case PROTOCOL_VERSION_NQ: case PROTOCOL_VERSION_BJP: case PROTOCOL_VERSION_BJP2: case PROTOCOL_VERSION_BJP3: return MSG_ReadByte(); case PROTOCOL_VERSION_FITZ: if (bits & B_FITZ_LARGEFRAME) return MSG_ReadShort(); return MSG_ReadByte(); default: Host_Error("%s: Unknown protocol version (%d)\n", __func__, cl.protocol); } } /* ================== CL_ParseUpdate Parse an entity update message from the server If an entities model or origin changes from frame to frame, it must be relinked. Other attributes can change without relinking. ================== */ void CL_ParseUpdate(unsigned int bits) { int i; model_t *model; int modnum; qboolean forcelink; entity_t *ent; int num; // FIXME - do this cleanly... 
#ifdef GLQUAKE int skin; #endif if (cls.state == ca_firstupdate) { // first update is the final signon stage cls.signon = SIGNONS; CL_SignonReply(); } if (bits & U_MOREBITS) { i = MSG_ReadByte(); bits |= (i << 8); } if (cl.protocol == PROTOCOL_VERSION_FITZ) { if (bits & U_FITZ_EXTEND1) bits |= MSG_ReadByte() << 16; if (bits & U_FITZ_EXTEND2) bits |= MSG_ReadByte() << 24; } if (bits & U_LONGENTITY) num = MSG_ReadShort(); else num = MSG_ReadByte(); ent = CL_EntityNum(num); if (ent->msgtime != cl.mtime[1]) forcelink = true; // no previous frame to lerp from else forcelink = false; ent->msgtime = cl.mtime[0]; if (bits & U_MODEL) { modnum = CL_ReadModelIndex(0); if (modnum >= max_models(cl.protocol)) Host_Error("CL_ParseModel: bad modnum"); } else modnum = ent->baseline.modelindex; if (bits & U_FRAME) ent->frame = MSG_ReadByte(); else ent->frame = ent->baseline.frame; /* ANIMATION LERPING INFO */ if (ent->currentframe != ent->frame) { /* TODO: invalidate things when they fall off the currententities list or haven't been updated for a while */ ent->previousframe = ent->currentframe; ent->previousframetime = ent->currentframetime; ent->currentframe = ent->frame; ent->currentframetime = cl.time; } if (bits & U_COLORMAP) i = MSG_ReadByte(); else i = ent->baseline.colormap; if (!i) ent->colormap = vid.colormap; else { if (i > cl.maxclients) Sys_Error("i >= cl.maxclients"); ent->colormap = cl.players[i - 1].translations; } #ifdef GLQUAKE if (bits & U_SKIN) skin = MSG_ReadByte(); else skin = ent->baseline.skinnum; if (skin != ent->skinnum) { ent->skinnum = skin; if (num > 0 && num <= cl.maxclients) R_TranslatePlayerSkin(num - 1); } #else if (bits & U_SKIN) ent->skinnum = MSG_ReadByte(); else ent->skinnum = ent->baseline.skinnum; #endif if (bits & U_EFFECTS) ent->effects = MSG_ReadByte(); else ent->effects = ent->baseline.effects; // shift the known values for interpolation VectorCopy(ent->msg_origins[0], ent->msg_origins[1]); VectorCopy(ent->msg_angles[0], ent->msg_angles[1]); if (bits & U_ORIGIN1) ent->msg_origins[0][0] = MSG_ReadCoord(); else ent->msg_origins[0][0] = ent->baseline.origin[0]; if (bits & U_ANGLE1) ent->msg_angles[0][0] = MSG_ReadAngle(); else ent->msg_angles[0][0] = ent->baseline.angles[0]; if (bits & U_ORIGIN2) ent->msg_origins[0][1] = MSG_ReadCoord(); else ent->msg_origins[0][1] = ent->baseline.origin[1]; if (bits & U_ANGLE2) ent->msg_angles[0][1] = MSG_ReadAngle(); else ent->msg_angles[0][1] = ent->baseline.angles[1]; if (bits & U_ORIGIN3) ent->msg_origins[0][2] = MSG_ReadCoord(); else ent->msg_origins[0][2] = ent->baseline.origin[2]; if (bits & U_ANGLE3) ent->msg_angles[0][2] = MSG_ReadAngle(); else ent->msg_angles[0][2] = ent->baseline.angles[2]; if (cl.protocol == PROTOCOL_VERSION_FITZ) { if (bits & U_NOLERP) { // FIXME - TODO (called U_STEP in FQ) } if (bits & U_FITZ_ALPHA) { MSG_ReadByte(); // FIXME - TODO } if (bits & U_FITZ_FRAME2) ent->frame = (ent->frame & 0xFF) | (MSG_ReadByte() << 8); if (bits & U_FITZ_MODEL2) modnum = (modnum & 0xFF)| (MSG_ReadByte() << 8); if (bits & U_FITZ_LERPFINISH) { MSG_ReadByte(); // FIXME - TODO } } model = cl.model_precache[modnum]; if (model != ent->model) { ent->model = model; // automatic animation (torches, etc) can be either all together // or randomized if (model) { if (model->synctype == ST_RAND) ent->syncbase = (float)(rand() & 0x7fff) / 0x7fff; else ent->syncbase = 0.0; } else forcelink = true; // hack to make null model players work #ifdef GLQUAKE if (num > 0 && num <= cl.maxclients) R_TranslatePlayerSkin(num - 1); #endif } /* 
MOVEMENT LERP INFO - could I just extend baseline instead? */ if (!VectorCompare(ent->msg_origins[0], ent->currentorigin)) { if (ent->currentorigintime) { VectorCopy(ent->currentorigin, ent->previousorigin); ent->previousorigintime = ent->currentorigintime; } else { VectorCopy(ent->msg_origins[0], ent->previousorigin); ent->previousorigintime = cl.mtime[0]; } VectorCopy(ent->msg_origins[0], ent->currentorigin); ent->currentorigintime = cl.mtime[0]; } if (!VectorCompare(ent->msg_angles[0], ent->currentangles)) { if (ent->currentanglestime) { VectorCopy(ent->currentangles, ent->previousangles); ent->previousanglestime = ent->currentanglestime; } else { VectorCopy(ent->msg_angles[0], ent->previousangles); ent->previousanglestime = cl.mtime[0]; } VectorCopy(ent->msg_angles[0], ent->currentangles); ent->currentanglestime = cl.mtime[0]; } if (bits & U_NOLERP) ent->forcelink = true; if (forcelink) { // didn't have an update last message VectorCopy(ent->msg_origins[0], ent->msg_origins[1]); VectorCopy(ent->msg_origins[0], ent->origin); VectorCopy(ent->msg_angles[0], ent->msg_angles[1]); VectorCopy(ent->msg_angles[0], ent->angles); ent->forcelink = true; } } /* ================== CL_ParseBaseline ================== */ static void CL_ParseBaseline(entity_t *ent, unsigned int bits) { int i; ent->baseline.modelindex = CL_ReadModelIndex(bits); ent->baseline.frame = CL_ReadModelFrame(bits); ent->baseline.colormap = MSG_ReadByte(); ent->baseline.skinnum = MSG_ReadByte(); for (i = 0; i < 3; i++) { ent->baseline.origin[i] = MSG_ReadCoord(); ent->baseline.angles[i] = MSG_ReadAngle(); } if (cl.protocol == PROTOCOL_VERSION_FITZ && (bits & B_FITZ_ALPHA)) { MSG_ReadByte(); // FIXME - TODO } } /* ================== CL_ParseClientdata Server information pertaining to this client only ================== */ void CL_ParseClientdata(void) { int i, j; unsigned int bits; bits = (unsigned short)MSG_ReadShort(); if (bits & SU_FITZ_EXTEND1) bits |= MSG_ReadByte() << 16; if (bits & SU_FITZ_EXTEND2) bits |= MSG_ReadByte() << 24; if (bits & SU_VIEWHEIGHT) cl.viewheight = MSG_ReadChar(); else cl.viewheight = DEFAULT_VIEWHEIGHT; if (bits & SU_IDEALPITCH) cl.idealpitch = MSG_ReadChar(); else cl.idealpitch = 0; VectorCopy(cl.mvelocity[0], cl.mvelocity[1]); for (i = 0; i < 3; i++) { if (bits & (SU_PUNCH1 << i)) cl.punchangle[i] = MSG_ReadChar(); else cl.punchangle[i] = 0; if (bits & (SU_VELOCITY1 << i)) cl.mvelocity[0][i] = MSG_ReadChar() * 16; else cl.mvelocity[0][i] = 0; } // [always sent] if (bits & SU_ITEMS) i = MSG_ReadLong(); if (cl.stats[STAT_ITEMS] != i) { // set flash times Sbar_Changed(); for (j = 0; j < 32; j++) if ((i & (1 << j)) && !(cl.stats[STAT_ITEMS] & (1 << j))) cl.item_gettime[j] = cl.time; cl.stats[STAT_ITEMS] = i; } cl.onground = (bits & SU_ONGROUND) != 0; cl.inwater = (bits & SU_INWATER) != 0; if (bits & SU_WEAPONFRAME) cl.stats[STAT_WEAPONFRAME] = MSG_ReadByte(); else cl.stats[STAT_WEAPONFRAME] = 0; if (bits & SU_ARMOR) i = MSG_ReadByte(); else i = 0; if (cl.stats[STAT_ARMOR] != i) { cl.stats[STAT_ARMOR] = i; Sbar_Changed(); } if (bits & SU_WEAPON) i = CL_ReadModelIndex(0); else i = 0; if (cl.stats[STAT_WEAPON] != i) { cl.stats[STAT_WEAPON] = i; Sbar_Changed(); } i = MSG_ReadShort(); if (cl.stats[STAT_HEALTH] != i) { cl.stats[STAT_HEALTH] = i; Sbar_Changed(); } i = MSG_ReadByte(); if (cl.stats[STAT_AMMO] != i) { cl.stats[STAT_AMMO] = i; Sbar_Changed(); } for (i = 0; i < 4; i++) { j = MSG_ReadByte(); if (cl.stats[STAT_SHELLS + i] != j) { cl.stats[STAT_SHELLS + i] = j; Sbar_Changed(); } } i = MSG_ReadByte(); 
if (standard_quake) { if (cl.stats[STAT_ACTIVEWEAPON] != i) { cl.stats[STAT_ACTIVEWEAPON] = i; Sbar_Changed(); } } else { if (cl.stats[STAT_ACTIVEWEAPON] != (1 << i)) { cl.stats[STAT_ACTIVEWEAPON] = (1 << i); Sbar_Changed(); } } /* FITZ Protocol */ if (bits & SU_FITZ_WEAPON2) cl.stats[STAT_WEAPON] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_ARMOR2) cl.stats[STAT_ARMOR] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_AMMO2) cl.stats[STAT_AMMO] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_SHELLS2) cl.stats[STAT_SHELLS] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_NAILS2) cl.stats[STAT_NAILS] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_ROCKETS2) cl.stats[STAT_ROCKETS] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_CELLS2) cl.stats[STAT_CELLS] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_WEAPONFRAME2) cl.stats[STAT_WEAPONFRAME] |= MSG_ReadByte() << 8; if (bits & SU_FITZ_WEAPONALPHA) MSG_ReadByte(); // FIXME - TODO } /* ===================== CL_NewTranslation ===================== */ void CL_NewTranslation(int slot) { int i, j; int top, bottom; byte *dest, *source; if (slot > cl.maxclients) Sys_Error("%s: slot > cl.maxclients", __func__); dest = cl.players[slot].translations; source = vid.colormap; memcpy(dest, vid.colormap, sizeof(cl.players[slot].translations)); top = cl.players[slot].topcolor; bottom = cl.players[slot].bottomcolor; #ifdef GLQUAKE R_TranslatePlayerSkin(slot); #endif for (i = 0; i < VID_GRADES; i++, dest += 256, source += 256) { if (top < 128) // the artists made some backwards ranges. sigh. memcpy(dest + TOP_RANGE, source + top, 16); else for (j = 0; j < 16; j++) dest[TOP_RANGE + j] = source[top + 15 - j]; if (bottom < 128) memcpy(dest + BOTTOM_RANGE, source + bottom, 16); else for (j = 0; j < 16; j++) dest[BOTTOM_RANGE + j] = source[bottom + 15 - j]; } } /* ===================== CL_ParseStatic ===================== */ void CL_ParseStatic(unsigned int bits) { entity_t *ent; int i; i = cl.num_statics; if (i >= MAX_STATIC_ENTITIES) Host_Error("Too many static entities"); ent = &cl_static_entities[i]; cl.num_statics++; CL_ParseBaseline(ent, bits); // copy it to the current state ent->model = cl.model_precache[ent->baseline.modelindex]; ent->frame = ent->baseline.frame; ent->colormap = vid.colormap; ent->skinnum = ent->baseline.skinnum; ent->effects = ent->baseline.effects; /* Initilise frames for model lerp */ ent->currentframe = ent->baseline.frame; ent->previousframe = ent->baseline.frame; ent->currentframetime = cl.time; ent->previousframetime = cl.time; /* Initialise movelerp data */ ent->previousorigintime = cl.time; ent->currentorigintime = cl.time; VectorCopy(ent->baseline.origin, ent->previousorigin); VectorCopy(ent->baseline.origin, ent->currentorigin); VectorCopy(ent->baseline.angles, ent->previousangles); VectorCopy(ent->baseline.angles, ent->currentangles); VectorCopy(ent->baseline.origin, ent->origin); VectorCopy(ent->baseline.angles, ent->angles); R_AddEfrags(ent); } static int CL_ReadSoundNum_Static(void) { switch (cl.protocol) { case PROTOCOL_VERSION_NQ: case PROTOCOL_VERSION_BJP: case PROTOCOL_VERSION_BJP3: case PROTOCOL_VERSION_FITZ: return MSG_ReadByte(); case PROTOCOL_VERSION_BJP2: return MSG_ReadShort(); default: Host_Error("%s: Unknown protocol version (%d)\n", __func__, cl.protocol); } } /* =================== CL_ParseStaticSound =================== */ static void CL_ParseStaticSound(void) { vec3_t org; int sound_num, vol, atten; int i; for (i = 0; i < 3; i++) org[i] = MSG_ReadCoord(); sound_num = CL_ReadSoundNum_Static(); vol = MSG_ReadByte(); atten = MSG_ReadByte(); 
S_StaticSound(cl.sound_precache[sound_num], org, vol, atten); } /* FITZ protocol */ static void CL_ParseFitzStaticSound2(void) { vec3_t org; int sound_num, vol, atten; int i; for (i = 0; i < 3; i++) org[i] = MSG_ReadCoord(); sound_num = MSG_ReadShort(); vol = MSG_ReadByte(); atten = MSG_ReadByte(); S_StaticSound(cl.sound_precache[sound_num], org, vol, atten); } /* helper function (was a macro, hence the CAPS) */ static void SHOWNET(const char *msg) { if (cl_shownet.value == 2) Con_Printf("%3i:%s\n", msg_readcount - 1, msg); } /* ===================== CL_ParseServerMessage ===================== */ void CL_ParseServerMessage(void) { unsigned int bits; int i, cmd, prevcmd; int playernum, version, stylenum, signon, statnum; int stopsound, entitynum, channel; byte colors; player_info_t *player; lightstyle_t *style; const char *stylemap, *name; // // if recording demos, copy the message out // if (cl_shownet.value == 1) Con_Printf("%i ", net_message.cursize); else if (cl_shownet.value == 2) Con_Printf("------------------\n"); cl.onground = false; // unless the server says otherwise // // parse the message // prevcmd = svc_bad; MSG_BeginReading(); while (1) { if (msg_badread) Host_Error("%s: Bad server message", __func__); cmd = MSG_ReadByte(); if (cmd == -1) { SHOWNET("END OF MESSAGE"); return; // end of message } // if the high bit of the command byte is set, it is a fast update if (cmd & 128) { SHOWNET("fast update"); CL_ParseUpdate(cmd & 127); continue; } SHOWNET(svc_strings[cmd]); // other commands switch (cmd) { case svc_nop: break; case svc_time: cl.mtime[1] = cl.mtime[0]; cl.mtime[0] = MSG_ReadFloat(); break; case svc_clientdata: CL_ParseClientdata(); break; case svc_version: version = MSG_ReadLong(); if (!Protocol_Known(version)) Host_Error("%s: Server returned unknown protocol version %i", __func__, version); cl.protocol = version; break; case svc_disconnect: Host_EndGame("Server disconnected\n"); case svc_print: Con_Printf("%s", MSG_ReadString()); break; case svc_centerprint: SCR_CenterPrint(MSG_ReadString()); break; case svc_stufftext: Cbuf_AddText("%s", MSG_ReadString()); break; case svc_damage: V_ParseDamage(); break; case svc_serverinfo: CL_ParseServerInfo(); vid.recalc_refdef = true; // leave intermission full screen break; case svc_setangle: for (i = 0; i < 3; i++) cl.viewangles[i] = MSG_ReadAngle(); break; case svc_setview: cl.viewentity = MSG_ReadShort(); break; case svc_lightstyle: stylenum = MSG_ReadByte(); if (stylenum >= MAX_LIGHTSTYLES) Sys_Error("svc_lightstyle > MAX_LIGHTSTYLES"); stylemap = MSG_ReadString(); style = cl_lightstyle + stylenum; snprintf(style->map, MAX_STYLESTRING, "%s", stylemap); style->length = strlen(style->map); break; case svc_sound: CL_ParseStartSoundPacket(); break; case svc_stopsound: stopsound = MSG_ReadShort(); /* 3-bit channel encoded in lsb */ entitynum = stopsound >> 3; channel = stopsound & 7; S_StopSound(entitynum, channel); break; case svc_updatename: Sbar_Changed(); playernum = MSG_ReadByte(); if (playernum >= cl.maxclients) Host_Error("%s: svc_updatename > MAX_SCOREBOARD", __func__); name = MSG_ReadString(); player = cl.players + playernum; snprintf(player->name, MAX_SCOREBOARDNAME, "%s", name); break; case svc_updatefrags: Sbar_Changed(); playernum = MSG_ReadByte(); if (playernum >= cl.maxclients) Host_Error("%s: svc_updatefrags > MAX_SCOREBOARD", __func__); player = cl.players + playernum; player->frags = MSG_ReadShort(); break; case svc_updatecolors: Sbar_Changed(); playernum = MSG_ReadByte(); if (playernum >= cl.maxclients) 
Host_Error("%s: svc_updatecolors > MAX_SCOREBOARD", __func__); colors = MSG_ReadByte(); player = cl.players + playernum; player->topcolor = (colors & 0xf0) >> 4; player->bottomcolor = colors & 0x0f; /* FIXME - is this the right check for current player? */ if (playernum == cl.viewentity) cl_color.value = colors; CL_NewTranslation(playernum); break; case svc_particle: R_ParseParticleEffect(); break; case svc_spawnbaseline: entitynum = MSG_ReadShort(); // must use CL_EntityNum() to force cl.num_entities up CL_ParseBaseline(CL_EntityNum(entitynum), 0); break; case svc_fitz_spawnbaseline2: /* FIXME - check here that protocol is FITZ? => Host_Error() */ entitynum = MSG_ReadShort(); bits = MSG_ReadByte(); // must use CL_EntityNum() to force cl.num_entities up CL_ParseBaseline(CL_EntityNum(entitynum), bits); break; case svc_spawnstatic: CL_ParseStatic(0); break; case svc_fitz_spawnstatic2: /* FIXME - check here that protocol is FITZ? => Host_Error() */ bits = MSG_ReadByte(); CL_ParseStatic(bits); break; case svc_temp_entity: CL_ParseTEnt(); break; case svc_setpause: cl.paused = MSG_ReadByte(); if (cl.paused) CDAudio_Pause(); else CDAudio_Resume(); break; case svc_signonnum: signon = MSG_ReadByte(); if (signon <= cls.signon) Host_Error("Received signon %d when at %d", signon, cls.signon); cls.signon = signon; CL_SignonReply(); break; case svc_killedmonster: cl.stats[STAT_MONSTERS]++; break; case svc_foundsecret: cl.stats[STAT_SECRETS]++; break; case svc_updatestat: statnum = MSG_ReadByte(); if (statnum < 0 || statnum >= MAX_CL_STATS) Sys_Error("svc_updatestat: %d is invalid", statnum); cl.stats[statnum] = MSG_ReadLong(); break; case svc_spawnstaticsound: CL_ParseStaticSound(); break; case svc_fitz_spawnstaticsound2: /* FIXME - check here that protocol is FITZ? => Host_Error() */ CL_ParseFitzStaticSound2(); break; case svc_cdtrack: cl.cdtrack = MSG_ReadByte(); cl.looptrack = MSG_ReadByte(); if ((cls.demoplayback || cls.demorecording) && (cls.forcetrack != -1)) CDAudio_Play((byte)cls.forcetrack, true); else CDAudio_Play((byte)cl.cdtrack, true); break; case svc_intermission: cl.intermission = 1; cl.completed_time = cl.time; vid.recalc_refdef = true; // go to full screen break; case svc_finale: cl.intermission = 2; cl.completed_time = cl.time; vid.recalc_refdef = true; // go to full screen SCR_CenterPrint(MSG_ReadString()); break; case svc_cutscene: cl.intermission = 3; cl.completed_time = cl.time; vid.recalc_refdef = true; // go to full screen SCR_CenterPrint(MSG_ReadString()); break; case svc_sellscreen: Cmd_ExecuteString("help", src_command); break; /* Various FITZ protocol messages - FIXME - !protocol => Host_Error */ case svc_fitz_skybox: MSG_ReadString(); // FIXME - TODO break; case svc_fitz_bf: Cmd_ExecuteString("bf", src_command); break; case svc_fitz_fog: /* FIXME - TODO */ MSG_ReadByte(); // density MSG_ReadByte(); // red MSG_ReadByte(); // green MSG_ReadByte(); // blue MSG_ReadShort(); // time break; default: Host_Error("%s: Illegible server message. Previous was %s", __func__, svc_strings[prevcmd]); } prevcmd = cmd; } }
12,876
528
static const checksum_tuples _3dInt32Checksums[19] = {
{UINT64C(0x0), UINT64C(0x300030003), UINT64C(0xab8e83e9)},
{UINT64C(0xa0), UINT64C(0x300030003), UINT64C(0xda55ac5950c74c2)},
{UINT64C(0x2a0), UINT64C(0x300030003), UINT64C(0xb85a3bd936a5c392)},
{UINT64C(0x120), UINT64C(0x300030003), UINT64C(0xdbb57cfa)},
{UINT64C(0x320), UINT64C(0x300030003), UINT64C(0x205d2fad)},
{UINT64C(0x400), UINT64C(0x1000100010), UINT64C(0xad7ade47)},
{UINT64C(0x4b0), UINT64C(0x1000100010), UINT64C(0xc92ee0e3f6e6aa91)},
{UINT64C(0x530), UINT64C(0x1000100010), UINT64C(0xd2482c01)},
{UINT64C(0x4b1), UINT64C(0x1000100010), UINT64C(0x21b0a7777c2c5b2d)},
{UINT64C(0x531), UINT64C(0x1000100010), UINT64C(0x9436e0c7)},
{UINT64C(0x4b2), UINT64C(0x1000100010), UINT64C(0xfe72d7ca4ce4cd2b)},
{UINT64C(0x532), UINT64C(0x1000100010), UINT64C(0xea428b3e)},
{UINT64C(0x4a0), UINT64C(0x1000100010), UINT64C(0x32942f0afdb349c2)},
{UINT64C(0x520), UINT64C(0x1000100010), UINT64C(0xb3d2ff2c)},
{UINT64C(0x4a1), UINT64C(0x1000100010), UINT64C(0x3a036901bbfdee14)},
{UINT64C(0x521), UINT64C(0x1000100010), UINT64C(0xb9258768)},
{UINT64C(0x4a2), UINT64C(0x1000100010), UINT64C(0x8a8ae9c57224ef8e)},
{UINT64C(0x522), UINT64C(0x1000100010), UINT64C(0xea428b3e)},
{UINT64C(0x4d0), UINT64C(0x1000100010), UINT64C(0xf0ab4d96d89cc545)},
};
801
1,444
package mage.cards.b;

import java.util.UUID;
import mage.abilities.effects.common.DestroyAllEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import mage.filter.FilterPermanent;

/**
 *
 * @author emerald000
 */
public final class BoilingSeas extends CardImpl {

    private static final FilterPermanent filter = new FilterPermanent("Islands");

    static {
        filter.add(SubType.ISLAND.getPredicate());
    }

    public BoilingSeas(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.SORCERY}, "{3}{R}");

        // Destroy all Islands.
        this.getSpellAbility().addEffect(new DestroyAllEffect(filter));
    }

    private BoilingSeas(final BoilingSeas card) {
        super(card);
    }

    @Override
    public BoilingSeas copy() {
        return new BoilingSeas(this);
    }
}
342
539
"""
Copyright 2021 The Magma Authors.

This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from contextlib import contextmanager

from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session as SQLAlchemy_Session
from sqlalchemy.orm import sessionmaker

Session = SQLAlchemy_Session


class SessionManager(object):
    """
    Database session manager class
    """

    def __init__(self, db_engine: Engine):
        self.session_factory = sessionmaker(bind=db_engine)

    @contextmanager
    def session_scope(self) -> Session:
        """
        Get database session

        Yields:
            Session: database session object

        Raises:
            Exception: generic exception
        """
        session = self.session_factory()
        try:
            yield session
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
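# --- Usage sketch (added for illustration; not part of the Magma source) ---
# One way the SessionManager above could be driven: build a SQLAlchemy engine,
# hand it to the manager, and run a trivial query inside the managed scope.
# The in-memory SQLite URL and the SELECT 1 statement are assumptions chosen
# only to keep the demo self-contained.
from sqlalchemy import create_engine, text


def _demo_session_scope() -> None:
    engine = create_engine("sqlite:///:memory:")
    manager = SessionManager(db_engine=engine)
    with manager.session_scope() as session:
        # Any ORM/Core work goes here; the scope rolls back on error and
        # always closes the session when the block exits.
        result = session.execute(text("SELECT 1"))
        print(result.scalar())


if __name__ == "__main__":
    _demo_session_scope()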
467
346
import logging
import os
import pathlib

# Bind a module-level logger instead of rebinding the `logging` module itself.
logger = logging.getLogger(__name__)


def write_bytes_to_file(path, bytes):
    """Write raw bytes to `path`, creating parent directories as needed."""
    logger.info("Writing %d bytes to file %s", len(bytes), path)
    parent = os.path.dirname(path)
    pathlib.Path(parent).mkdir(parents=True, exist_ok=True)
    with open(path, "wb") as f:
        f.write(bytes)


def write_string_to_file(path, s):
    """Write a text string to `path`, creating parent directories as needed."""
    logger.info("Writing %d chars to file %s", len(s), path)
    parent = os.path.dirname(path)
    pathlib.Path(parent).mkdir(parents=True, exist_ok=True)
    with open(path, "w") as f:
        f.write(s)
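# --- Usage sketch (illustrative; not part of the original module) ---
# A minimal round trip through the helpers above, writing into a temporary
# directory so nothing outside it is touched. The file names and payloads are
# assumptions made for the demo.
import tempfile


def _demo_file_helpers() -> None:
    with tempfile.TemporaryDirectory() as tmp:
        text_path = os.path.join(tmp, "nested", "dirs", "hello.txt")
        write_string_to_file(text_path, "hello, world")
        write_bytes_to_file(os.path.join(tmp, "blob.bin"), b"\x00\x01\x02")
        with open(text_path) as f:
            print(f.read())


if __name__ == "__main__":
    _demo_file_helpers()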
236