max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
1,144 | package de.metas.i18n.impl;
import java.util.Properties;
import java.util.Set;
import de.metas.i18n.ILanguageBL;
import de.metas.i18n.ITranslatableString;
import de.metas.i18n.Msg;
import de.metas.util.Services;
import lombok.NonNull;
/*
* #%L
* de.metas.adempiere.adempiere.base
* %%
* Copyright (C) 2019 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
 * {@link ITranslatableString} implementation backed by a single {@code text} key.
 * <p>
 * Translation is delegated to {@link Msg#translate(String, boolean, String)}, which resolves the
 * given text either as an AD_Element column name or as an AD_Message value.
 *
 * @author metas-dev <<EMAIL>>
 */
@lombok.EqualsAndHashCode
final class ADElementOrADMessageTranslatableString implements ITranslatableString
{
	/** AD_Element column name or AD_Message value, translated on demand. */
	private final String text;

	ADElementOrADMessageTranslatableString(@NonNull final String text)
	{
		this.text = text;
	}

	@Override
	public String toString()
	{
		return text;
	}

	@Override
	public String translate(final String adLanguage)
	{
		// NOTE(review): the isSOTrx flag is hard-coded to true here; presumably Msg uses it
		// only as a lookup hint — confirm if non-SO contexts ever need this string.
		return Msg.translate(adLanguage, true, text);
	}

	@Override
	public String getDefaultValue()
	{
		// The "@...@" form is the standard AD expression placeholder syntax.
		return "@" + text + "@";
	}

	@Override
	public Set<String> getAD_Languages()
	{
		// All languages configured in the system are potential translation targets.
		return Services.get(ILanguageBL.class).getAvailableLanguages().getAD_Languages();
	}
}
| 592 |
372 | <gh_stars>100-1000
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.cloudidentity.v1.model;
/**
 * Next ID to use: 7
 *
 * <p>Java data model class describing how to parse/serialize the JSON that is transmitted over
 * HTTP when working with the Cloud Identity API. For a detailed explanation see
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">the
 * google-http-java-client JSON guide</a>.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleAppsCloudidentityDevicesV1alpha1EndpointApp extends com.google.api.client.json.GenericJson {

  /** Output only. Name of the app displayed to the user. May be {@code null}. */
  @com.google.api.client.util.Key
  private String displayName;

  /**
   * Output only. <a href="https://cloud.google.com/apis/design/resource_names">Resource name</a>
   * of the EndpointApp in format
   * {@code devices/{device}/deviceUsers/{device_user}/endpointApps/{endpoint_app}}, where
   * client_app_id is the ID of the app associated with the Device. May be {@code null}.
   */
  @com.google.api.client.util.Key
  private String name;

  /** Output only. Full package name of the installed app. May be {@code null}. */
  @com.google.api.client.util.Key
  private String packageName;

  /** Output only. Names of all permissions granted to the installed app. May be {@code null}. */
  @com.google.api.client.util.Key
  private java.util.List<String> permissions;

  /** Output only. Version code of the installed app. May be {@code null}. */
  @com.google.api.client.util.Key
  private Integer versionCode;

  /** Output only. Version name of the installed app. May be {@code null}. */
  @com.google.api.client.util.Key
  private String versionName;

  /**
   * Output only. Name of the app displayed to the user.
   * @return value or {@code null} for none
   */
  public String getDisplayName() {
    return displayName;
  }

  /**
   * Output only. Name of the app displayed to the user.
   * @param displayName displayName or {@code null} for none
   * @return this object, for call chaining
   */
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp setDisplayName(String displayName) {
    this.displayName = displayName;
    return this;
  }

  /**
   * Output only. Resource name of the EndpointApp in format
   * {@code devices/{device}/deviceUsers/{device_user}/endpointApps/{endpoint_app}}.
   * @return value or {@code null} for none
   */
  public String getName() {
    return name;
  }

  /**
   * Output only. Resource name of the EndpointApp in format
   * {@code devices/{device}/deviceUsers/{device_user}/endpointApps/{endpoint_app}}.
   * @param name name or {@code null} for none
   * @return this object, for call chaining
   */
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp setName(String name) {
    this.name = name;
    return this;
  }

  /**
   * Output only. Full package name of the installed app.
   * @return value or {@code null} for none
   */
  public String getPackageName() {
    return packageName;
  }

  /**
   * Output only. Full package name of the installed app.
   * @param packageName packageName or {@code null} for none
   * @return this object, for call chaining
   */
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp setPackageName(String packageName) {
    this.packageName = packageName;
    return this;
  }

  /**
   * Output only. Names of all permissions granted to the installed app.
   * @return value or {@code null} for none
   */
  public java.util.List<String> getPermissions() {
    return permissions;
  }

  /**
   * Output only. Names of all permissions granted to the installed app.
   * @param permissions permissions or {@code null} for none
   * @return this object, for call chaining
   */
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp setPermissions(java.util.List<String> permissions) {
    this.permissions = permissions;
    return this;
  }

  /**
   * Output only. Version code of the installed app.
   * @return value or {@code null} for none
   */
  public Integer getVersionCode() {
    return versionCode;
  }

  /**
   * Output only. Version code of the installed app.
   * @param versionCode versionCode or {@code null} for none
   * @return this object, for call chaining
   */
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp setVersionCode(Integer versionCode) {
    this.versionCode = versionCode;
    return this;
  }

  /**
   * Output only. Version name of the installed app.
   * @return value or {@code null} for none
   */
  public String getVersionName() {
    return versionName;
  }

  /**
   * Output only. Version name of the installed app.
   * @param versionName versionName or {@code null} for none
   * @return this object, for call chaining
   */
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp setVersionName(String versionName) {
    this.versionName = versionName;
    return this;
  }

  @Override
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp set(String fieldName, Object value) {
    return (GoogleAppsCloudidentityDevicesV1alpha1EndpointApp) super.set(fieldName, value);
  }

  @Override
  public GoogleAppsCloudidentityDevicesV1alpha1EndpointApp clone() {
    return (GoogleAppsCloudidentityDevicesV1alpha1EndpointApp) super.clone();
  }
}
| 2,009 |
372 | <reponame>wuyinlei/VideoDemo
package org.sunger.net.utils;
import java.util.Formatter;
import java.util.Locale;
/**
 * Helper for formatting media playback positions/durations for display.
 * <p>
 * Created by Administrator on 2015/11/3.
 */
public class MediaPlayerUtils {

    /**
     * Formats a time given in milliseconds as {@code "MM:SS"}, or {@code "HH:MM:SS"} when the
     * duration is one hour or longer.
     *
     * @param timeMs playback position/duration in milliseconds
     * @return the formatted display time, e.g. {@code "01:05"} or {@code "01:01:05"}
     */
    public static String getVideoDisplayTime(long timeMs) {
        // Divide the long milliseconds first; the original `(int) timeMs / 1000` cast the
        // raw millisecond value to int before dividing and could overflow for long media.
        long totalSeconds = timeMs / 1000;
        long seconds = totalSeconds % 60;
        long minutes = (totalSeconds / 60) % 60;
        long hours = totalSeconds / 3600;
        // Include the hours field when present. The original returned the identical "MM:SS"
        // string from both branches, silently dropping the hours (the intended behavior was
        // left commented out below the branch).
        // String.format is also used instead of the previous shared static
        // StringBuilder/Formatter pair, which was not thread-safe.
        if (hours > 0) {
            return String.format(Locale.getDefault(), "%02d:%02d:%02d", hours, minutes, seconds);
        }
        return String.format(Locale.getDefault(), "%02d:%02d", minutes, seconds);
    }
}
| 459 |
486 | <filename>PlacesAPI/app/src/main/java/com/tutsplus/placesapi/MainActivity.java
package com.tutsplus.placesapi;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AutoCompleteTextView;
import android.widget.TextView;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesNotAvailableException;
import com.google.android.gms.common.GooglePlayServicesRepairableException;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.location.places.Place;
import com.google.android.gms.location.places.PlaceBuffer;
import com.google.android.gms.location.places.PlaceLikelihood;
import com.google.android.gms.location.places.PlaceLikelihoodBuffer;
import com.google.android.gms.location.places.Places;
import com.google.android.gms.location.places.ui.PlacePicker;
/**
 * Demo activity for the Google Places API: place autocomplete, "current place" detection and the
 * place picker UI. Results are rendered into a plain {@link TextView}.
 */
public class MainActivity extends ActionBarActivity implements GoogleApiClient.OnConnectionFailedListener,
        GoogleApiClient.ConnectionCallbacks {

    /** Request code identifying the place-picker activity result (constant, hence static final). */
    private static final int PLACE_PICKER_REQUEST = 1;

    private GoogleApiClient mGoogleApiClient;
    private AutoCompleteAdapter mAdapter;
    private TextView mTextView;
    private AutoCompleteTextView mPredictTextView;

    @Override
    protected void onCreate( Bundle savedInstanceState ) {
        super.onCreate( savedInstanceState );
        setContentView( R.layout.activity_main );
        mTextView = (TextView) findViewById( R.id.textview );
        mPredictTextView = (AutoCompleteTextView) findViewById( R.id.predicttextview );
        mAdapter = new AutoCompleteAdapter( this );
        mPredictTextView.setAdapter( mAdapter );
        // When the user selects an autocomplete suggestion, resolve it to a full Place.
        mPredictTextView.setOnItemClickListener( new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                AutoCompletePlace place = (AutoCompletePlace) parent.getItemAtPosition( position );
                findPlaceById( place.getId() );
            }
        });
        mGoogleApiClient = new GoogleApiClient
                .Builder( this )
                .enableAutoManage( this, 0, this )
                .addApi( Places.GEO_DATA_API )
                .addApi( Places.PLACE_DETECTION_API )
                .addConnectionCallbacks( this )
                .addOnConnectionFailedListener( this )
                .build();
    }

    @Override
    protected void onStart() {
        super.onStart();
        if( mGoogleApiClient != null )
            mGoogleApiClient.connect();
    }

    @Override
    protected void onStop() {
        if( mGoogleApiClient != null && mGoogleApiClient.isConnected() ) {
            // Detach the client from the adapter first so it stops issuing prediction requests.
            mAdapter.setGoogleApiClient( null );
            mGoogleApiClient.disconnect();
        }
        super.onStop();
    }

    /**
     * Looks up the full {@link Place} details for the given place id and displays them.
     * No-op when the id is empty or the API client is not connected.
     */
    private void findPlaceById( String id ) {
        if( TextUtils.isEmpty( id ) || mGoogleApiClient == null || !mGoogleApiClient.isConnected() )
            return;
        Places.GeoDataApi.getPlaceById( mGoogleApiClient, id ) .setResultCallback( new ResultCallback<PlaceBuffer>() {
            @Override
            public void onResult(PlaceBuffer places) {
                if( places.getStatus().isSuccess() ) {
                    Place place = places.get( 0 );
                    displayPlace( place );
                    mPredictTextView.setText( "" );
                    mAdapter.clear();
                }
                //Release the PlaceBuffer to prevent a memory leak
                places.release();
            }
        } );
    }

    /** Asks the Place Detection API for the most likely current place and displays it. */
    private void guessCurrentPlace() {
        // Guard against calls while the API client is unavailable
        // (same guard as displayPlacePicker(); previously missing here).
        if( mGoogleApiClient == null || !mGoogleApiClient.isConnected() )
            return;
        PendingResult<PlaceLikelihoodBuffer> result = Places.PlaceDetectionApi.getCurrentPlace( mGoogleApiClient, null );
        result.setResultCallback( new ResultCallback<PlaceLikelihoodBuffer>() {
            @Override
            public void onResult( PlaceLikelihoodBuffer likelyPlaces ) {
                // The buffer may be empty; get(0) would throw in that case.
                PlaceLikelihood placeLikelihood = likelyPlaces.getCount() > 0 ? likelyPlaces.get( 0 ) : null;
                String content = "";
                if( placeLikelihood != null && placeLikelihood.getPlace() != null && !TextUtils.isEmpty( placeLikelihood.getPlace().getName() ) )
                    content = "Most likely place: " + placeLikelihood.getPlace().getName() + "\n";
                if( placeLikelihood != null )
                    content += "Percent chance of being there: " + (int) ( placeLikelihood.getLikelihood() * 100 ) + "%";
                mTextView.setText( content );
                // Release the buffer to prevent a memory leak
                likelyPlaces.release();
            }
        });
    }

    /** Launches the place picker UI; its result arrives in {@link #onActivityResult}. */
    private void displayPlacePicker() {
        if( mGoogleApiClient == null || !mGoogleApiClient.isConnected() )
            return;
        PlacePicker.IntentBuilder builder = new PlacePicker.IntentBuilder();
        try {
            startActivityForResult( builder.build( getApplicationContext() ), PLACE_PICKER_REQUEST );
        } catch ( GooglePlayServicesRepairableException e ) {
            Log.d( "PlacesAPI Demo", "GooglePlayServicesRepairableException thrown" );
        } catch ( GooglePlayServicesNotAvailableException e ) {
            Log.d( "PlacesAPI Demo", "GooglePlayServicesNotAvailableException thrown" );
        }
    }

    @Override
    protected void onActivityResult( int requestCode, int resultCode, Intent data ) {
        // Always forward to super so other result consumers (e.g. fragments) keep working;
        // the original omitted both @Override and the super call.
        super.onActivityResult( requestCode, resultCode, data );
        if( requestCode == PLACE_PICKER_REQUEST && resultCode == RESULT_OK ) {
            displayPlace( PlacePicker.getPlace( data, this ) );
        }
    }

    /** Renders name/address/phone of the given place into the text view (skips empty fields). */
    private void displayPlace( Place place ) {
        if( place == null )
            return;
        String content = "";
        if( !TextUtils.isEmpty( place.getName() ) ) {
            content += "Name: " + place.getName() + "\n";
        }
        if( !TextUtils.isEmpty( place.getAddress() ) ) {
            content += "Address: " + place.getAddress() + "\n";
        }
        if( !TextUtils.isEmpty( place.getPhoneNumber() ) ) {
            content += "Phone: " + place.getPhoneNumber();
        }
        mTextView.setText( content );
    }

    @Override
    public boolean onCreateOptionsMenu( Menu menu ) {
        getMenuInflater().inflate( R.menu.menu_main, menu );
        return true;
    }

    @Override
    public boolean onOptionsItemSelected( MenuItem item ) {
        int id = item.getItemId();
        if( id == R.id.action_place_picker ) {
            displayPlacePicker();
            return true;
        } else if( id == R.id.action_guess_current_place ) {
            guessCurrentPlace();
            return true;
        }
        return super.onOptionsItemSelected( item );
    }

    @Override
    public void onConnected( Bundle bundle ) {
        // Hand the connected client to the adapter so autocomplete predictions can run.
        if( mAdapter != null )
            mAdapter.setGoogleApiClient( mGoogleApiClient );
    }

    @Override
    public void onConnectionSuspended( int i ) {
        // Nothing to do: enableAutoManage reconnects automatically.
    }

    @Override
    public void onConnectionFailed( ConnectionResult connectionResult ) {
        // Intentionally empty in this demo; a production app should surface the failure.
    }
}
| 2,953 |
435 | {
"copyright_text": "Standard YouTube License",
"description": "PyCon Finland 2015\n<NAME> - Bug forecasting by visualizing code evolution\n\nI would like to introduce and expand the ideas presented by <NAME> in his work (Your Code as a Crime Scene \u2013 applying forensics to mining software repositories - where do the bugs propbably lurch). Tornhill's main ideas are combining cyclomatic code complexity metric with repository history -calculated \"code/knowledge ownership\". Adding test coverage data, replicated production-system web traffic and their call graphs adds whole new dimensions to the data available from the repository.\n\nAbout the author: I'm a(n almost-university-dropout) mathemathics major, software engineer. I've worked professionally with Python (and Django) since 2009, having also had the privilege to work only with DVCS. I've worked in different organizations ranging from freelancing and small companies to multinational corporations. I enjoy automation, debuggers and meta-programming. Most of my non-programming spare time goes into a cappella singing, improvisational theatre and finding ways to express my acrobatic tendencies.",
"duration": 3556,
"language": "eng",
"recorded": "2015-10-19",
"related_urls": [],
"speakers": [
"<NAME>"
],
"tags": [],
"thumbnail_url": "https://i.ytimg.com/vi/BMSyr0EkvlU/maxresdefault.jpg",
"title": "Bug forecasting by visualizing code evolution",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=BMSyr0EkvlU"
}
]
}
| 429 |
#!/usr/bin/python
"""
Configure and run tools.

Runs at container startup: creates desktop/menu shortcuts for Jupyter
Notebook and Jupyter Lab (embedding the Jupyter auth token when one is
configured) and initializes the filebrowser database with a generated
admin password on first start.
"""
from subprocess import call
import os
import sys

# Enable logging
import logging

logging.basicConfig(
    format='%(asctime)s [%(levelname)s] %(message)s',
    level=logging.INFO,
    stream=sys.stdout)
log = logging.getLogger(__name__)

ENV_RESOURCES_PATH = os.getenv("RESOURCES_PATH", "/resources")
ENV_WORKSPACE_HOME = os.getenv("WORKSPACE_HOME", "/workspace")  # kept for parity with other startup scripts
HOME = os.getenv("HOME", "/root")
DESKTOP_PATH = HOME + "/Desktop"

# Resolve the Jupyter access token so it can be embedded into the shortcut URLs.
# AUTHENTICATE_VIA_JUPYTER is either "true" (token comes from JupyterHub via
# JPY_API_TOKEN), "false"/unset (no auth), or the token value itself.
ENV_AUTHENTICATE_VIA_JUPYTER = os.getenv("AUTHENTICATE_VIA_JUPYTER", "false")
token_parameter = ""
if ENV_AUTHENTICATE_VIA_JUPYTER.lower() == "true":
    # Check if started via Jupyterhub -> JPY_API_TOKEN is set
    ENV_JPY_API_TOKEN = os.getenv("JPY_API_TOKEN", None)
    if ENV_JPY_API_TOKEN:
        token_parameter = "?token=" + ENV_JPY_API_TOKEN
elif ENV_AUTHENTICATE_VIA_JUPYTER and ENV_AUTHENTICATE_VIA_JUPYTER.lower() != "false":
    token_parameter = "?token=" + ENV_AUTHENTICATE_VIA_JUPYTER

# NOTE(review): the shell commands below interpolate environment-derived values
# (token, paths) into `call(..., shell=True)` strings without quoting; a value
# containing shell metacharacters would be interpreted by the shell. Acceptable
# for trusted container env vars, but worth confirming.

# Create Jupyter Shortcut - at runtime since the jupyterhub token is needed
url = 'http://localhost:8092' + token_parameter
shortcut_metadata = '[Desktop Entry]\nVersion=1.0\nType=Link\nName=Jupyter Notebook\nComment=\nCategories=Development;\nIcon=' + ENV_RESOURCES_PATH + '/icons/jupyter-icon.png\nURL=' + url
call('printf "' + shortcut_metadata + '" > ' + DESKTOP_PATH + '/jupyter.desktop', shell=True)  # create a link on the Desktop to your Jupyter notebook server
call('chmod +x ' + DESKTOP_PATH + '/jupyter.desktop', shell=True)  # Make executable
call('printf "' + shortcut_metadata + '" > /usr/share/applications/jupyter.desktop', shell=True)  # create a link in categories menu to your Jupyter notebook server
call('chmod +x /usr/share/applications/jupyter.desktop', shell=True)  # Make executable

# Create Jupyter Lab Shortcut
url = 'http://localhost:8092' + "/lab" + token_parameter
shortcut_metadata = '[Desktop Entry]\nVersion=1.0\nType=Link\nName=Jupyter Lab\nComment=\nCategories=Development;\nIcon=' + ENV_RESOURCES_PATH + '/icons/jupyterlab-icon.png\nURL=' + url
call('printf "' + shortcut_metadata + '" > /usr/share/applications/jupyterlab.desktop', shell=True)  # create a link in categories menu to your Jupyter Lab server
call('chmod +x /usr/share/applications/jupyterlab.desktop', shell=True)  # Make executable

# Configure filebrowser - only if database file does not exist yet (e.g. isn't restored)
if not os.path.exists(HOME + '/filebrowser.db'):
    log.info("Initialize filebrowser database.")
    # Init filebrowser configuration - Surpress all output
    call('filebrowser config init --database=' + HOME + '/filebrowser.db > /dev/null', shell=True)
    # Add admin user. Use the cryptographically secure `secrets` module: the
    # previous `random.sample(string.ascii_lowercase, 20)` was predictable and,
    # sampling without replacement, drew at most 20 *distinct* letters out of 26.
    import secrets
    import string
    filebrowser_pwd = ''.join(secrets.choice(string.ascii_lowercase + string.digits) for _ in range(20))
    log.info("Create filebrowser admin with generated password: " + filebrowser_pwd)
    call('filebrowser users add admin ' + filebrowser_pwd + ' --perm.admin=true --database=' + HOME + '/filebrowser.db > /dev/null', shell=True)
    # Configure filebrowser
    configure_filebrowser = 'filebrowser config set --root="/" --auth.method=proxy --auth.header=X-Token-Header ' \
        + ' --branding.files=$RESOURCES_PATH"/filebrowser/" --branding.name="Filebrowser" ' \
        + ' --branding.disableExternal --signup=false --perm.admin=false --perm.create=false ' \
        + ' --perm.delete=false --perm.download=true --perm.execute=false ' \
        + ' --perm.admin=false --perm.create=false --perm.delete=false ' \
        + ' --perm.modify=false --perm.rename=false --perm.share=false ' \
        + ' --database=' + HOME + '/filebrowser.db'
    # Port and base url is configured at startup - Surpress all output
    call(configure_filebrowser + " > /dev/null", shell=True)
# Tools are started via supervisor, see supervisor.conf
1,155 | /*
* Copyright 2009, <NAME>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Mahmood Ali. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.notnoop.apns.integration;
import com.notnoop.apns.APNS;
import com.notnoop.apns.ApnsDelegate;
import com.notnoop.apns.ApnsNotification;
import com.notnoop.apns.ApnsService;
import com.notnoop.apns.DeliveryError;
import com.notnoop.apns.EnhancedApnsNotification;
import com.notnoop.apns.integration.ApnsDelegateRecorder.MessageSentFailedRecord;
import com.notnoop.apns.utils.FixedCertificates;
import com.notnoop.apns.utils.Simulator.ApnsResponse;
import com.notnoop.apns.utils.Simulator.ApnsSimulatorWithVerification;
import com.notnoop.exceptions.ApnsDeliveryErrorException;
import com.notnoop.exceptions.NetworkIOException;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import java.util.List;
import static com.notnoop.apns.utils.FixedCertificates.LOCALHOST;
import static com.notnoop.apns.utils.FixedCertificates.clientContext;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Integration test verifying that notifications buffered behind a failed submission are
 * redelivered — and that the client delegate is notified when redelivery itself fails because
 * the (simulated) APNS gateway has gone away.
 */
public class ApnsConnectionResendTest {

    // Fixed notifications pushed by the test; constants, hence static final
    // (the originals were mutable static fields).
    private static final EnhancedApnsNotification NOTIFICATION_0 = buildNotification(0);
    private static final EnhancedApnsNotification NOTIFICATION_1 = buildNotification(1);
    private static final EnhancedApnsNotification NOTIFICATION_2 = buildNotification(2);

    /** Shared APNS simulator; created lazily on first setUp() and stopped after the class. */
    private static ApnsSimulatorWithVerification apnsSim;

    private ApnsDelegateRecorder delegateRecorder;
    private ApnsService testee;

    @Before
    public void setUp() {
        if (apnsSim == null) {
            apnsSim = new ApnsSimulatorWithVerification(FixedCertificates.serverContext().getServerSocketFactory());
            apnsSim.start();
        }
        apnsSim.reset();
        delegateRecorder = new ApnsDelegateRecorder();
        testee = build(delegateRecorder);
    }

    @AfterClass
    public static void tearDownClass() {
        if (apnsSim != null) {
            apnsSim.stop();
            apnsSim = null;
        }
    }

    /*
     * Test when we submit 3 messages to APNS 0, 1, 2. 0 is an error but we don't see the error response back until
     * 1,2 have already been submitted. Then at this point the network connection to APNS cannot be made, so that
     * when retrying the submissions we have to notify the client that delivery failed for 1 and 2.
     */
    @Test
    public void testGivenFailedSubmissionDueToErrorThenApnsDownWithNotificationsInBufferEnsureClientNotified()
            throws Exception {
        final DeliveryError deliveryError = DeliveryError.INVALID_PAYLOAD_SIZE;
        apnsSim.when(NOTIFICATION_0).thenDoNothing();
        apnsSim.when(NOTIFICATION_1).thenDoNothing();
        apnsSim.when(NOTIFICATION_2).thenRespond(ApnsResponse.returnErrorAndShutdown(deliveryError, NOTIFICATION_0));
        testee.push(NOTIFICATION_0);
        testee.push(NOTIFICATION_1);
        testee.push(NOTIFICATION_2);
        // Wait (bounded) for all three failure records instead of a fixed 5s sleep:
        // finishes as soon as the failures arrive, same worst-case timeout.
        awaitFailedRecords(3, 5000);
        // Verify received expected notifications
        apnsSim.verify();
        // verify delegate calls
        assertEquals(3, delegateRecorder.getSent().size());
        final List<MessageSentFailedRecord> failed = delegateRecorder.getFailed();
        assertEquals(3, failed.size());
        // first is failed delivery due to payload size
        failed.get(0).assertRecord(NOTIFICATION_0, new ApnsDeliveryErrorException(deliveryError));
        // second and third are due to not being able to connect to APNS
        assertNetworkIoExForRedelivery(NOTIFICATION_1, failed.get(1));
        assertNetworkIoExForRedelivery(NOTIFICATION_2, failed.get(2));
    }

    /** Polls until the delegate has recorded at least {@code expected} failures or the timeout elapses. */
    private void awaitFailedRecords(int expected, long timeoutMs) throws InterruptedException {
        final long deadline = System.currentTimeMillis() + timeoutMs;
        while (delegateRecorder.getFailed().size() < expected && System.currentTimeMillis() < deadline) {
            Thread.sleep(50);
        }
    }

    /** Asserts that the failure record is a resend-flagged {@link NetworkIOException} for the given notification. */
    private void assertNetworkIoExForRedelivery(ApnsNotification notification, MessageSentFailedRecord failed) {
        failed.assertRecord(notification, new NetworkIOException());
        final NetworkIOException found = failed.getException();
        assertTrue(found.isResend());
    }

    /** Builds an ApnsService wired to the simulator's gateway/feedback ports with the given delegate. */
    private ApnsService build(ApnsDelegate delegate) {
        return APNS.newService()
                .withConnectTimeout(1000)
                .withSSLContext(clientContext())
                .withGatewayDestination(LOCALHOST, apnsSim.getEffectiveGatewayPort())
                .withFeedbackDestination(LOCALHOST, apnsSim.getEffectiveFeedbackPort())
                .withDelegate(delegate).build();
    }

    /** Creates a deterministic test notification with an id-derived device token and empty payload. */
    private static EnhancedApnsNotification buildNotification(int id) {
        final String deviceToken = ApnsSimulatorWithVerification.deviceTokenForId(id);
        return new EnhancedApnsNotification(id, 1, deviceToken, "{\"aps\":{}}");
    }
}
| 2,151 |
839 | <reponame>AnEmortalKid/cxf<gh_stars>100-1000
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.systest.jaxrs.security.oauth2.common;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.UnsupportedCallbackException;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.message.Message;
import org.apache.cxf.phase.PhaseInterceptorChain;
import org.apache.cxf.rt.security.claims.SAMLClaim;
import org.apache.wss4j.common.crypto.Crypto;
import org.apache.wss4j.common.crypto.CryptoFactory;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.saml.SAMLCallback;
import org.apache.wss4j.common.saml.bean.ActionBean;
import org.apache.wss4j.common.saml.bean.AttributeBean;
import org.apache.wss4j.common.saml.bean.AttributeStatementBean;
import org.apache.wss4j.common.saml.bean.AudienceRestrictionBean;
import org.apache.wss4j.common.saml.bean.AuthDecisionStatementBean;
import org.apache.wss4j.common.saml.bean.AuthDecisionStatementBean.Decision;
import org.apache.wss4j.common.saml.bean.AuthenticationStatementBean;
import org.apache.wss4j.common.saml.bean.ConditionsBean;
import org.apache.wss4j.common.saml.bean.SubjectBean;
import org.apache.wss4j.common.saml.bean.Version;
import org.apache.wss4j.common.saml.builder.SAML2Constants;
import org.joda.time.DateTime;
/**
* A CallbackHandler instance that is used by the STS to mock up a SAML Attribute Assertion.
*/
public class SamlCallbackHandler implements CallbackHandler {
private String confirmationMethod = SAML2Constants.CONF_BEARER;
private boolean signAssertion = true;
private String issuer = "resourceOwner";
private String audience;
private boolean saml2 = true;
private String cryptoPropertiesFile = "org/apache/cxf/systest/jaxrs/security/alice.properties";
private String issuerKeyName = "alice";
private String issuerKeyPassword = "password";
private String subjectName = "alice";
public SamlCallbackHandler(boolean signAssertion) {
this.signAssertion = signAssertion;
}
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
Message m = PhaseInterceptorChain.getCurrentMessage();
for (int i = 0; i < callbacks.length; i++) {
if (callbacks[i] instanceof SAMLCallback) {
SAMLCallback callback = (SAMLCallback) callbacks[i];
if (saml2) {
callback.setSamlVersion(Version.SAML_20);
} else {
callback.setSamlVersion(Version.SAML_11);
}
callback.setIssuer(issuer);
String subject = m != null ? (String)m.getContextualProperty("saml.subject.name") : null;
if (subject == null) {
subject = subjectName;
}
String subjectQualifier = "www.mock-sts.com";
SubjectBean subjectBean =
new SubjectBean(
subject, subjectQualifier, confirmationMethod
);
callback.setSubject(subjectBean);
ConditionsBean conditions = new ConditionsBean();
AudienceRestrictionBean audienceRestriction = new AudienceRestrictionBean();
audienceRestriction.setAudienceURIs(Collections.singletonList(audience));
conditions.setAudienceRestrictions(Collections.singletonList(audienceRestriction));
callback.setConditions(conditions);
AuthDecisionStatementBean authDecBean = new AuthDecisionStatementBean();
authDecBean.setDecision(Decision.INDETERMINATE);
authDecBean.setResource("https://sp.example.com/SAML2");
authDecBean.setSubject(subjectBean);
ActionBean actionBean = new ActionBean();
actionBean.setContents("Read");
authDecBean.setActions(Collections.singletonList(actionBean));
callback.setAuthDecisionStatementData(Collections.singletonList(authDecBean));
AuthenticationStatementBean authBean = new AuthenticationStatementBean();
authBean.setSubject(subjectBean);
authBean.setAuthenticationInstant(new DateTime());
authBean.setSessionIndex("123456");
authBean.setSubject(subjectBean);
// AuthnContextClassRef is not set
authBean.setAuthenticationMethod(
"urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport");
callback.setAuthenticationStatementData(
Collections.singletonList(authBean));
AttributeStatementBean attrBean = new AttributeStatementBean();
attrBean.setSubject(subjectBean);
List<String> roles = m != null
? CastUtils.<String>cast((List<?>)m.getContextualProperty("saml.roles")) : null;
if (roles == null) {
roles = Collections.singletonList("user");
}
List<AttributeBean> claims = new ArrayList<>();
AttributeBean roleClaim = new AttributeBean();
roleClaim.setSimpleName("subject-role");
roleClaim.setQualifiedName(SAMLClaim.SAML_ROLE_ATTRIBUTENAME_DEFAULT);
roleClaim.setNameFormat(SAML2Constants.ATTRNAME_FORMAT_UNSPECIFIED);
roleClaim.setAttributeValues(new ArrayList<>(roles));
claims.add(roleClaim);
List<String> authMethods =
m != null ? CastUtils.<String>cast((List<?>)m.getContextualProperty("saml.auth")) : null;
if (authMethods == null) {
authMethods = Collections.singletonList("password");
}
AttributeBean authClaim = new AttributeBean();
authClaim.setSimpleName("http://claims/authentication");
authClaim.setQualifiedName("http://claims/authentication");
authClaim.setNameFormat("http://claims/authentication-format");
authClaim.setAttributeValues(new ArrayList<>(authMethods));
claims.add(authClaim);
attrBean.setSamlAttributes(claims);
callback.setAttributeStatementData(Collections.singletonList(attrBean));
if (signAssertion) {
try {
Crypto crypto = CryptoFactory.getInstance(cryptoPropertiesFile);
callback.setIssuerCrypto(crypto);
callback.setIssuerKeyName(issuerKeyName);
callback.setIssuerKeyPassword(issuerKeyPassword);
callback.setSignAssertion(true);
} catch (WSSecurityException e) {
throw new IOException(e);
}
}
}
}
}
    /** @return path of the crypto properties file used to build the issuer {@code Crypto} instance when signing assertions */
    public String getCryptoPropertiesFile() {
        return cryptoPropertiesFile;
    }
    /** @param cryptoPropertiesFile path of the crypto properties file used when signing assertions */
    public void setCryptoPropertiesFile(String cryptoPropertiesFile) {
        this.cryptoPropertiesFile = cryptoPropertiesFile;
    }
    /** @return alias of the issuer signing key passed to the SAML callback */
    public String getIssuerKeyName() {
        return issuerKeyName;
    }
    /** @param issuerKeyName alias of the issuer signing key */
    public void setIssuerKeyName(String issuerKeyName) {
        this.issuerKeyName = issuerKeyName;
    }
    /** @return password of the issuer signing key passed to the SAML callback */
    public String getIssuerKeyPassword() {
        return issuerKeyPassword;
    }
    /** @param issuerKeyPassword password of the issuer signing key */
    public void setIssuerKeyPassword(String issuerKeyPassword) {
        this.issuerKeyPassword = issuerKeyPassword;
    }
    /** @return the configured SAML issuer value */
    public String getIssuer() {
        return issuer;
    }
    /** @param issuer the SAML issuer value to use */
    public void setIssuer(String issuer) {
        this.issuer = issuer;
    }
    /** @return the configured audience value */
    public String getAudience() {
        return audience;
    }
    /** @param audience the audience value to use */
    public void setAudience(String audience) {
        this.audience = audience;
    }
    /** @param confMethod the subject confirmation method to use (no matching getter is exposed) */
    public void setConfirmationMethod(String confMethod) {
        this.confirmationMethod = confMethod;
    }
    /** @return whether this handler is configured to emit SAML 2.0 (as opposed to SAML 1.1) */
    public boolean isSaml2() {
        return saml2;
    }
    /** @param saml2 true to emit SAML 2.0 assertions */
    public void setSaml2(boolean saml2) {
        this.saml2 = saml2;
    }
    /** @return the configured subject name */
    public String getSubjectName() {
        return subjectName;
    }
    /** @param subjectName the subject name to place in the assertion's subject */
    public void setSubjectName(String subjectName) {
        this.subjectName = subjectName;
    }
}
| 3,914 |
22,688 | <filename>modules/canbus/vehicle/zhongyun/protocol/vehicle_state_feedback_c1.cc<gh_stars>1000+
/******************************************************************************
* Copyright 2019 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/canbus/vehicle/zhongyun/protocol/vehicle_state_feedback_c1.h"
#include "glog/logging.h"
#include "modules/drivers/canbus/common/byte.h"
#include "modules/drivers/canbus/common/canbus_consts.h"
namespace apollo {
namespace canbus {
namespace zhongyun {

using ::apollo::drivers::canbus::Byte;

// Stateless parser: all input arrives through Parse(), so nothing to set up.
Vehiclestatefeedbackc1::Vehiclestatefeedbackc1() {}

// CAN message ID of the vehicle state feedback frame.
const int32_t Vehiclestatefeedbackc1::ID = 0xC1;

// Decodes every signal of frame 0xC1 into the zhongyun section of the
// chassis detail proto.
void Vehiclestatefeedbackc1::Parse(const std::uint8_t* bytes, int32_t length,
                                   ChassisDetail* chassis) const {
  chassis->mutable_zhongyun()
      ->mutable_vehicle_state_feedback_c1()
      ->set_parking_actual(parking_actual(bytes, length));
  chassis->mutable_zhongyun()
      ->mutable_vehicle_state_feedback_c1()
      ->set_brake_torque_feedback(brake_torque_feedback(bytes, length));
  chassis->mutable_zhongyun()
      ->mutable_vehicle_state_feedback_c1()
      ->set_gear_state_actual(gear_state_actual(bytes, length));
  chassis->mutable_zhongyun()
      ->mutable_vehicle_state_feedback_c1()
      ->set_steering_actual(steering_actual(bytes, length));
  // Raw signal is km/h (see the 'speed' config detail below); stored as m/s.
  chassis->mutable_zhongyun()->mutable_vehicle_state_feedback_c1()->set_speed(
      speed(bytes, length) / 3.6);
}

// config detail: {'name': 'parking_actual', 'enum': {0:
// 'PARKING_ACTUAL_RELEASE', 1: 'PARKING_ACTUAL_PARKING_TRIGGER'},
// 'precision': 1.0, 'len': 8, 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|1]', 'bit': 56, 'type': 'enum', 'order': 'intel',
// 'physical_unit': ''}
// Single byte at offset 7, mapped directly onto the enum.
Vehicle_state_feedback_c1::Parking_actualType
Vehiclestatefeedbackc1::parking_actual(const std::uint8_t* bytes,
                                       int32_t length) const {
  Byte t0(bytes + 7);
  int32_t x = t0.get_byte(0, 8);

  Vehicle_state_feedback_c1::Parking_actualType ret =
      static_cast<Vehicle_state_feedback_c1::Parking_actualType>(x);
  return ret;
}

// config detail: {'name': 'brake_torque_feedback', 'offset': 0.0, 'precision':
// 0.05, 'len': 16, 'is_signed_var': False, 'physical_range': '[0|100]', 'bit':
// 40, 'type': 'double', 'order': 'intel', 'physical_unit': '%'}
// Little-endian 16-bit value: byte 6 is the high byte, byte 5 the low byte.
double Vehiclestatefeedbackc1::brake_torque_feedback(const std::uint8_t* bytes,
                                                     int32_t length) const {
  Byte t0(bytes + 6);
  int32_t x = t0.get_byte(0, 8);

  Byte t1(bytes + 5);
  int32_t t = t1.get_byte(0, 8);
  x <<= 8;
  x |= t;

  double ret = x * 0.050000;
  return ret;
}

// config detail: {'name': 'gear_state_actual', 'enum': {1:
// 'GEAR_STATE_ACTUAL_P', 2: 'GEAR_STATE_ACTUAL_N', 3: 'GEAR_STATE_ACTUAL_D', 4:
// 'GEAR_STATE_ACTUAL_R', 5: 'GEAR_STATE_ACTUAL_INVALID'}, 'precision': 1.0,
// 'len': 8, 'is_signed_var': False, 'offset': 0.0, 'physical_range': '[0|5]',
// 'bit': 32, 'type': 'enum', 'order': 'intel', 'physical_unit': ''}
// Single byte at offset 4, mapped directly onto the enum.
Vehicle_state_feedback_c1::Gear_state_actualType
Vehiclestatefeedbackc1::gear_state_actual(const std::uint8_t* bytes,
                                          int32_t length) const {
  Byte t0(bytes + 4);
  int32_t x = t0.get_byte(0, 8);

  Vehicle_state_feedback_c1::Gear_state_actualType ret =
      static_cast<Vehicle_state_feedback_c1::Gear_state_actualType>(x);
  return ret;
}

// config detail: {'name': 'steering_actual', 'offset': -1638.35, 'precision':
// 0.05, 'len': 16, 'is_signed_var': False, 'physical_range': '[-40|40]', 'bit':
// 16, 'type': 'double', 'order': 'intel', 'physical_unit': 'deg'}
// Little-endian 16-bit value (bytes 3/2), scaled and offset per config detail.
double Vehiclestatefeedbackc1::steering_actual(const std::uint8_t* bytes,
                                               int32_t length) const {
  Byte t0(bytes + 3);
  int32_t x = t0.get_byte(0, 8);

  Byte t1(bytes + 2);
  int32_t t = t1.get_byte(0, 8);
  x <<= 8;
  x |= t;

  double ret = x * 0.050000 + -1638.350000;
  return ret;
}

// config detail: {'name': 'speed', 'offset': 0.0, 'precision': 0.01, 'len': 16,
// 'is_signed_var': False, 'physical_range': '[0|35]', 'bit': 0, 'type':
// 'double', 'order': 'intel', 'physical_unit': 'kph'}
// Little-endian 16-bit value (bytes 1/0); result is km/h, converted by Parse().
double Vehiclestatefeedbackc1::speed(const std::uint8_t* bytes,
                                     int32_t length) const {
  Byte t0(bytes + 1);
  int32_t x = t0.get_byte(0, 8);

  Byte t1(bytes + 0);
  int32_t t = t1.get_byte(0, 8);
  x <<= 8;
  x |= t;

  double ret = x * 0.010000;
  return ret;
}
}  // namespace zhongyun
}  // namespace canbus
}  // namespace apollo
| 2,170 |
1,182 | <filename>workers/example/worker.json
{
"name": "example",
"ip": "0.0.0.0",
"hostname": "example.com",
"username": "user",
    "password": "<PASSWORD>",
"port": 22,
"python": "python3",
"city": "mycensoredcity",
"keyfile": "example.pem",
"country": "mycensoredcountry",
"geneva_path": "evolving-evasion",
"censored": true,
"active": true,
"provider": null
}
| 182 |
5,169 | {
"name": "BMXSwitch",
"version": "1.0",
"summary": "Image based replacement for UISwitch.",
"homepage": "https://github.com/mbigatti/BMXSwitch",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/mbigatti/BMXSwitch.git",
"tag": "1.0"
},
"platforms": {
"ios": "5.0"
},
"source_files": [
"BMXSwitch",
"BMXSwitch/**/*.{h,m}"
],
"requires_arc": true
}
| 199 |
552 | <gh_stars>100-1000
#pragma once
namespace et {


//=============
// Ref Pointer
//=============


// static functionality
////////////////////////

//-----------------------
// RefPtr::StaticCast
//
// Shares the refcount with 'from' while static_cast-ing the managed pointer;
// adds one strong reference for the returned RefPtr.
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType> RefPtr<TDataType>::StaticCast(RefPtr<TOtherType> const& from)
{
	RefPtr ret;
	ret.m_Ptr = static_cast<TDataType*>(from.Get());
	ret.m_RefCount = from.m_RefCount;
	if (ret.m_RefCount != nullptr)
	{
		++(ret.m_RefCount->m_References);
	}

	return ret;
}


// From nullptr
////////////////

//------------------
// RefPtr::operator= (nullptr)   [banner fixed: this is assignment, not a c-tor]
//
// Releases the held reference (if any) and leaves this pointer null.
//
template <typename TDataType>
RefPtr<TDataType>& RefPtr<TDataType>::operator=(std::nullptr_t)
{
	Invalidate();
	return *this;
}


// From creator
////////////////

//------------------
// RefPtr::c-tor
//
// Move data in from creation and invalidate creation class. This creates our first reference
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>::RefPtr(Create<TOtherType>&& create)
{
	TDataType* tempPtr = create.Release();
	std::swap(m_Ptr, tempPtr);
	StartRefCount();
}

//---------------------------------
// RefPtr::operator=
//
// Move data in from creation and invalidate creation class. This creates our first reference
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>& RefPtr<TDataType>::operator=(Create<TOtherType>&& create)
{
	DecrementRefcount();

	TDataType* tempPtr = create.Release();
	std::swap(m_Ptr, tempPtr);
	StartRefCount();

	return *this;
}


// copy from other reference
/////////////////////////////

//---------------------------------
// RefPtr::c-tor
//
// copy and increment the refcount
//
template <typename TDataType>
RefPtr<TDataType>::RefPtr(RefPtr<TDataType> const& copy)
	: m_Ptr(copy.m_Ptr)
	, m_RefCount(copy.m_RefCount)
{
	if (m_RefCount != nullptr)
	{
		++(m_RefCount->m_References);
	}
}

//---------------------------------
// RefPtr::operator=
//
// Copy assignment - increment refcount - using copy and swap idiom
//
template <typename TDataType>
RefPtr<TDataType>& RefPtr<TDataType>::operator=(RefPtr<TDataType> copy)
{
	copy.Swap(*this);
	return *this;
}

//---------------------------------
// RefPtr::c-tor
//
// copy and increment the refcount
// Version supporting derived classes
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>::RefPtr(RefPtr<TOtherType> const& copy)
	: m_RefCount(copy.m_RefCount)
{
	TDataType* tempPtr = copy.m_Ptr;
	std::swap(m_Ptr, tempPtr);

	if (m_RefCount != nullptr)
	{
		++(m_RefCount->m_References);
	}
}

//---------------------------------
// RefPtr::operator=
//
// Copy assignment - increment refcount
// Version supporting derived classes
//
// FIX: the previous version overwrote m_Ptr / m_RefCount without releasing the
// reference this RefPtr already held, leaking one strong reference on every
// assignment. We now acquire the incoming reference first (so assignments
// between pointers sharing the same refcount stay safe), then release the old
// one.
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>& RefPtr<TDataType>::operator=(RefPtr<TOtherType> const& copy)
{
	// implicit TOtherType* -> TDataType* conversion, as elsewhere in this file
	TDataType* const newPtr = copy.m_Ptr;
	auto* const newRefCount = copy.m_RefCount;

	// acquire the new reference before releasing the old one
	if (newRefCount != nullptr)
	{
		++(newRefCount->m_References);
	}

	// release whatever we previously pointed at (this call was missing -> leak)
	DecrementRefcount();

	m_Ptr = newPtr;
	m_RefCount = newRefCount;
	return *this;
}


// move from refptr
////////////////////

//---------------------------------
// RefPtr::c-tor
//
// Move constructor - invalidate old pointer
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>::RefPtr(RefPtr<TOtherType>&& moving)
{
	// copy data. moved refcount stays intact, our own refcount will be 0 anyway
	TDataType* tempPtr = moving.m_Ptr;
	std::swap(m_Ptr, tempPtr);
	std::swap(m_RefCount, moving.m_RefCount);

	// invalidate old pointer
	moving.m_Ptr = nullptr;
}

//---------------------------------
// RefPtr::operator=
//
// Move assignment - invalidate old pointer
//
// FIX: this used to read 'copy.m_RefCount', but no variable named 'copy'
// exists in this function (the parameter is 'moving'), so the template failed
// to compile when instantiated. Take the refcount from 'moving' instead; the
// total reference count is unchanged because 'moving' is invalidated below.
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>& RefPtr<TDataType>::operator=(RefPtr<TOtherType>&& moving)
{
	// if we hold a reference remove it
	DecrementRefcount();

	// copy data. moved refcount stays intact
	TDataType* tempPtr = moving.m_Ptr;
	std::swap(m_Ptr, tempPtr);
	m_RefCount = moving.m_RefCount;

	// invalidate old pointer
	moving.m_Ptr = nullptr;
	moving.m_RefCount = nullptr;

	return *this;
}


// move from unique ptr
////////////////////////

//---------------------------------
// RefPtr::c-tor
//
// Move constructor from unique pointer
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>::RefPtr(UniquePtr<TOtherType>&& unique)
{
	// copy data. moved refcount stays intact, our own refcount will be 0 anyway
	TDataType* tempPtr = unique.Release();
	std::swap(m_Ptr, tempPtr);
	StartRefCount();
}

//---------------------------------
// RefPtr::operator=
//
// Move assignment from unique pointer
//
template <typename TDataType>
template <typename TOtherType>
RefPtr<TDataType>& RefPtr<TDataType>::operator=(UniquePtr<TOtherType>&& unique)
{
	// if we hold a reference remove it
	DecrementRefcount();

	// copy data. moved refcount stays intact
	TDataType* tempPtr = unique.Release();
	std::swap(m_Ptr, tempPtr);
	StartRefCount();

	return *this;
}


// destroy
/////////////

//---------------------------------
// RefPtr::d-tor
//
// decrement ref count, delete data if there are no reference, delete refcount if there aren't even any weak references
//
template <typename TDataType>
RefPtr<TDataType>::~RefPtr()
{
	DecrementRefcount();
}


// Utility
/////////////

//---------------------------------
// RefPtr::IsNull
//
template <typename TDataType>
bool RefPtr<TDataType>::IsNull() const
{
	return (m_Ptr == nullptr);
}

//---------------------------------
// RefPtr::Swap
//
// Swap two unique pointers
//
template <typename TDataType>
void RefPtr<TDataType>::Swap(RefPtr<TDataType>& other) noexcept
{
	std::swap(m_Ptr, other.m_Ptr);
	std::swap(m_RefCount, other.m_RefCount);
}


// Accessors
/////////////

//---------------------------------
// RefPtr::Get
//
template <typename TDataType>
TDataType* RefPtr<TDataType>::Get() const
{
	return m_Ptr;
}

//---------------------------------
// RefPtr::operator->
//
template <typename TDataType>
TDataType* RefPtr<TDataType>::operator->() const
{
	ET_ASSERT(m_Ptr != nullptr);
	return Get();
}

//---------------------------------
// RefPtr::operator*
//
template <typename TDataType>
TDataType& RefPtr<TDataType>::operator*() const
{
	ET_ASSERT(m_Ptr != nullptr);
	return *Get();
}


// private
///////////

//---------------------------------
// RefPtr::StartRefCount
//
// Allocates a fresh refcount (one strong reference) for a non-null pointer.
//
template <typename TDataType>
void et::RefPtr<TDataType>::StartRefCount()
{
	if (m_Ptr != nullptr)
	{
		m_RefCount = new RefCount();
	}
}

//---------------------------------
// RefPtr::DecrementRefcount
//
// Releases one strong reference; frees the object at zero strong references
// and the refcount block once no weak references remain either. Note that the
// members are deliberately left as-is - callers null them where required.
//
template <typename TDataType>
void RefPtr<TDataType>::DecrementRefcount()
{
	if (m_Ptr != nullptr)
	{
		--(m_RefCount->m_References);
		if (m_RefCount->m_References == 0u)
		{
			delete m_Ptr;

			if (m_RefCount->m_WeakReferences == 0u)
			{
				delete m_RefCount;
			}
		}
	}
}

//---------------------------------
// RefPtr::Invalidate   [banner fixed: previously mislabeled DecrementRefcount]
//
template <typename TDataType>
void RefPtr<TDataType>::Invalidate()
{
	DecrementRefcount();
	m_Ptr = nullptr;
	m_RefCount = nullptr;
}


// comparisons
///////////////

//---------------------------------
// RefPtr::operator==
//
// Compare pointer equals null
//
template <typename TDataType>
bool operator==(RefPtr<TDataType> const& ptr, std::nullptr_t)
{
	return ptr.IsNull();
}

template <typename TDataType>
bool operator==(std::nullptr_t, RefPtr<TDataType> const& ptr)
{
	return ptr.IsNull();
}

template <typename TDataType>
bool operator==(RefPtr<TDataType> const& ptr1, RefPtr<TDataType> const& ptr2)
{
	return (ptr1.Get() == ptr2.Get());
}

template <typename TDataType>
bool operator!=(RefPtr<TDataType> const& ptr, std::nullptr_t)
{
	return !ptr.IsNull();
}

template <typename TDataType>
bool operator!=(std::nullptr_t, RefPtr<TDataType> const& ptr)
{
	return !ptr.IsNull();
}

template <typename TDataType>
bool operator!=(RefPtr<TDataType> const& ptr1, RefPtr<TDataType> const& ptr2)
{
	return !(ptr1 == ptr2);
}

} // namespace et
| 2,757 |
488 | // g0013.cc
// compound lit with type defn inside array deref
// related to d0078.cc
// Parser stress case: a struct type *defined* inside a compound literal,
// where the literal itself appears within an array-subscript expression.
// The expression has no runtime meaning ('a' is uninitialized); only
// parsing/typechecking of this construct is under test.
void f()
{
  int *a;
  a[
    (
      (struct Foo { int x; }){ 0 }     // compound lit: {x=0}
    ).x
  ];
}
| 98 |
879 | package org.zstack.sdk.iam2.entity;
public class IAM2ProjectAttributeInventory extends org.zstack.sdk.iam2.entity.IAM2AttributeInventory {

    /** UUID of the project this attribute record is associated with. */
    public java.lang.String projectUuid;

    public java.lang.String getProjectUuid() {
        return this.projectUuid;
    }

    public void setProjectUuid(java.lang.String projectUuid) {
        this.projectUuid = projectUuid;
    }

}
| 145 |
310 | <reponame>dreeves/usesthis
{
"name": "Proun",
"description": "An abstract ball game.",
"url": "http://www.proun-game.com/"
} | 55 |
980 | package org.jcodec.codecs.vpx.vp8.data;
/**
* This class is part of JCodec ( www.jcodec.org ) This software is distributed
* under FreeBSD License.
*
* The class is a direct java port of libvpx's
* (https://github.com/webmproject/libvpx) relevant VP8 code with significant
* java oriented refactoring.
*
* @author The JCodec project
*
*/
public class QualityMetrics {
    // Plain data holder with public fields, matching the ported libvpx style.
    // NOTE(review): the units and exact semantics of these counters are defined
    // by the callers (not visible in this file) - confirm against the encoder
    // code before relying on them.
    public int rateBase;
    public int rateComp;
    public long distortion;
    public long error;
}
892 | {
"schema_version": "1.2.0",
"id": "GHSA-4w62-g43g-55gj",
"modified": "2022-05-13T01:36:05Z",
"published": "2022-05-13T01:36:05Z",
"aliases": [
"CVE-2017-9656"
],
"details": "The backend database of the Philips DoseWise Portal application versions 1.1.7.333 and 2.1.1.3069 uses hard-coded credentials for a database account with privileges that can affect confidentiality, integrity, and availability of the database. For an attacker to exploit this vulnerability, elevated privileges are first required for an attacker to access the web application backend system files that contain the hard-coded credentials. Successful exploitation may allow a remote attacker to gain access to the database of the DWP application, which contains PHI. CVSS v3 base score: 9.1, CVSS vector string: AV:N/AC:L/PR:H/UI:N/S:C/C:H/I:H/A:H.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.0/AV:N/AC:L/PR:H/UI:N/S:C/C:H/I:H/A:H"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2017-9656"
},
{
"type": "WEB",
"url": "https://ics-cert.us-cert.gov/advisories/ICSMA-17-229-01"
},
{
"type": "WEB",
"url": "http://www.philips.com/productsecurity"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/100471"
}
],
"database_specific": {
"cwe_ids": [
"CWE-798"
],
"severity": "CRITICAL",
"github_reviewed": false
}
} | 637 |
17,481 | <gh_stars>1000+
/*
* Copyright (C) 2016 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.functional.producers.subcomponent.pruning;
import com.google.common.util.concurrent.ListenableFuture;
import dagger.multibindings.IntoSet;
import dagger.producers.ProducerModule;
import dagger.producers.Produces;
import dagger.producers.ProductionComponent;
import dagger.producers.ProductionSubcomponent;
import java.util.Set;
import javax.inject.Qualifier;
/**
* Supporting types for {@code ProductionSubcomponentOnlyRequestedBySiblingTest}. {@link ChildA} is
* a direct child of the top level component, but is only requested within its sibling, not directly
* from its parent.
*/
@ProductionComponent(
  modules = {
    ParentDoesntUseProductionSubcomponent.ParentModule.class,
    dagger.functional.producers.ExecutorModule.class
  }
)
interface ParentDoesntUseProductionSubcomponent {
  // Only ChildB is reachable directly from the parent; ChildA is reached solely
  // through ChildBModule#fromChildA below.
  ChildB.Builder childBBuilder();

  @ProductionSubcomponent(modules = ChildAModule.class)
  interface ChildA {
    @ProductionSubcomponent.Builder
    interface Builder {
      ChildA build();
    }

    ListenableFuture<Set<Class<?>>> componentHierarchy();
  }

  @ProductionSubcomponent(modules = ChildBModule.class)
  interface ChildB {
    @ProductionSubcomponent.Builder
    interface Builder {
      ChildB build();
    }

    ListenableFuture<Set<Class<?>>> componentHierarchy();

    @FromChildA
    ListenableFuture<Set<Class<?>>> componentHierarchyFromChildA();
  }

  // Each module contributes its component's class into the multibound set, so a
  // test can observe which components took part in a resolution.
  @ProducerModule(subcomponents = {ChildA.class, ChildB.class})
  class ParentModule {
    @Produces
    @IntoSet
    static Class<?> produceComponentType() {
      return ParentDoesntUseProductionSubcomponent.class;
    }
  }

  @ProducerModule
  class ChildAModule {
    @Produces
    @IntoSet
    static Class<?> produceComponentType() {
      return ChildA.class;
    }
  }

  @ProducerModule
  class ChildBModule {
    @Produces
    @IntoSet
    static Class<?> produceComponentType() {
      return ChildB.class;
    }

    // Builds a ChildA from within ChildB and blocks on its hierarchy set.
    @Produces
    @FromChildA
    Set<Class<?>> fromChildA(ChildA.Builder childABuilder) throws Exception {
      return childABuilder.build().componentHierarchy().get();
    }
  }

  @Qualifier
  @interface FromChildA {}
}
| 860 |
313 | #ifndef BOOK_CHAPTER6_TASK_PROCESSOR_NETWORK_CLIENT_HPP
#define BOOK_CHAPTER6_TASK_PROCESSOR_NETWORK_CLIENT_HPP
#include "../02_tasks_processor_timers/tasks_processor_timers.hpp"
#include <boost/asio/ip/tcp.hpp>
#include <boost/core/noncopyable.hpp>
// A TCP socket paired with the single buffer used for reads and writes on it.
// Noncopyable: the connection owns the socket; pass it around via connection_ptr.
struct connection_with_data: boost::noncopyable {
    boost::asio::ip::tcp::socket socket;
    std::string data;

    explicit connection_with_data(boost::asio::io_service& ios)
        : socket(ios)
    {}

    template <class Executor> // since Boost 1.70 I/O objects can be constructed from executors
    explicit connection_with_data(Executor executor)
        : socket(executor)
    {}

    // Shut down both directions and close; errors are deliberately ignored
    // because teardown (including the destructor below) must not throw.
    void shutdown() {
        if (!socket.is_open()) {
            return;
        }

        boost::system::error_code ignore;
        socket.shutdown(
            boost::asio::ip::tcp::socket::shutdown_both,
            ignore
        );
        socket.close(ignore);
    }

    ~connection_with_data() {
        shutdown();
    }
};
#include <memory> // std::unique_ptr
typedef std::unique_ptr<connection_with_data> connection_ptr;
// Adapts a user callback 'T' - taking (connection_ptr&&, error_code) - to the
// (error, bytes_transferred) handler signature Boost.Asio expects, carrying
// ownership of the connection through the async operation.
template <class T>
struct task_wrapped_with_connection {
private:
    connection_ptr c_;
    T task_unwrapped_;

public:
    explicit task_wrapped_with_connection(connection_ptr&& c, const T& f)
        : c_(std::move(c))
        , task_unwrapped_(f)
    {}

    void operator()(
        const boost::system::error_code& error,
        std::size_t bytes_count)
    {
        // Trim the buffer to what was actually transferred, then hand the
        // connection (and the error) over to the user task.
        // NOTE(review): detail::make_task_wrapped presumably adds exception
        // handling around the invocation - confirm in its definition.
        const auto lambda = [this, &error, bytes_count]() {
            this->c_->data.resize(bytes_count);
            this->task_unwrapped_(std::move(this->c_), error);
        };
        const auto task = detail::make_task_wrapped(lambda);
        task();
    }
};
#include <boost/asio/write.hpp>
template <class T>
struct task_wrapped_with_connection;
// Asynchronously writes the whole of c->data to the socket, then invokes
// f(connection_ptr, error). The socket and buffer are bound to references
// *before* std::move(c), so they remain valid while ownership of the
// connection transfers into the completion handler.
template <class Functor>
void async_write_data(connection_ptr&& c, const Functor& f) {
    boost::asio::ip::tcp::socket& s = c->socket;
    std::string& d = c->data;

    boost::asio::async_write(
        s,
        boost::asio::buffer(d),
        task_wrapped_with_connection<Functor>(std::move(c), f)
    );
}
#include <boost/asio/read.hpp>
// Asynchronously reads exactly at_least_bytes into c->data (the buffer is
// resized to that length and async_read completes only when it is full),
// then invokes f(connection_ptr, error).
template <class Functor>
void async_read_data(
    connection_ptr&& c,
    const Functor& f,
    std::size_t at_least_bytes)
{
    c->data.resize(at_least_bytes);

    boost::asio::ip::tcp::socket& s = c->socket;
    std::string& d = c->data;
    // &d[0] is only valid for a non-empty string, hence the emptiness check
    char* p = (d.empty() ? 0 : &d[0]);

    boost::asio::async_read(
        s,
        boost::asio::buffer(p, d.size()),
        task_wrapped_with_connection<Functor>(std::move(c), f)
    );
}
// Asynchronously reads into a buffer of size at_most, completing once at
// least at_least_bytes have arrived (transfer_at_least), then invokes
// f(connection_ptr, error). The handler trims c->data to the byte count
// actually received.
template <class Functor>
void async_read_data_at_least(
    connection_ptr&& c,
    const Functor& f,
    std::size_t at_least_bytes,
    std::size_t at_most)
{
    std::string& d = c->data;
    d.resize(at_most);
    char* p = (at_most == 0 ? 0 : &d[0]);

    boost::asio::ip::tcp::socket& s = c->socket;

    boost::asio::async_read(
        s,
        boost::asio::buffer(p, at_most),
        boost::asio::transfer_at_least(at_least_bytes),
        task_wrapped_with_connection<Functor>(std::move(c), f)
    );
}
namespace tp_network_client {

// Extends the timer-enabled tasks processor with synchronous creation of
// client-side TCP connections.
class tasks_processor: public tp_timers::tasks_processor {
    // ...

public:
    // Blocking connect to addr:port_num; returns a ready-to-use connection
    // bound to this processor's io_service.
    static connection_ptr create_connection(
        const char* addr,
        unsigned short port_num)
    {
        connection_ptr c( new connection_with_data(get_ios()) );

        c->socket.connect(boost::asio::ip::tcp::endpoint(
            boost::asio::ip::address_v4::from_string(addr),
            port_num
        ));

        return c;
    }
};

} // namespace tp_network_client
#endif // BOOK_CHAPTER6_TASK_PROCESSOR_NETWORK_CLIENT_HPP
| 1,630 |
403 | /*
* Camunda Platform REST API
* OpenApi Spec for Camunda Platform REST API.
*
* The version of the OpenAPI document: 7.16.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.camunda.consulting.openapi.client.model;
import com.camunda.consulting.openapi.client.model.AtomLink;
import com.camunda.consulting.openapi.client.model.ProcessInstanceDto;
import com.camunda.consulting.openapi.client.model.ProcessInstanceWithVariablesDtoAllOf;
import com.camunda.consulting.openapi.client.model.VariableValueDto;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/**
* Model tests for ProcessInstanceWithVariablesDto
*/
public class ProcessInstanceWithVariablesDtoTest {
    // Generated stub suite (OpenAPI Generator): every case below is an empty
    // placeholder awaiting real assertions against the model instance.
    private final ProcessInstanceWithVariablesDto model = new ProcessInstanceWithVariablesDto();

    /**
     * Model tests for ProcessInstanceWithVariablesDto
     */
    @Test
    public void testProcessInstanceWithVariablesDto() {
        // TODO: test ProcessInstanceWithVariablesDto
    }

    /**
     * Test the property 'variables'
     */
    @Test
    public void variablesTest() {
        // TODO: test variables
    }

    /**
     * Test the property 'id'
     */
    @Test
    public void idTest() {
        // TODO: test id
    }

    /**
     * Test the property 'definitionId'
     */
    @Test
    public void definitionIdTest() {
        // TODO: test definitionId
    }

    /**
     * Test the property 'businessKey'
     */
    @Test
    public void businessKeyTest() {
        // TODO: test businessKey
    }

    /**
     * Test the property 'caseInstanceId'
     */
    @Test
    public void caseInstanceIdTest() {
        // TODO: test caseInstanceId
    }

    /**
     * Test the property 'ended'
     */
    @Test
    public void endedTest() {
        // TODO: test ended
    }

    /**
     * Test the property 'suspended'
     */
    @Test
    public void suspendedTest() {
        // TODO: test suspended
    }

    /**
     * Test the property 'tenantId'
     */
    @Test
    public void tenantIdTest() {
        // TODO: test tenantId
    }

    /**
     * Test the property 'links'
     */
    @Test
    public void linksTest() {
        // TODO: test links
    }

}
313 | /*
* vim:ts=4:sw=4:expandtab
*
* i3 - an improved dynamic tiling window manager
* © 2009 <NAME> and contributors (see also: LICENSE)
*
* util.c: Utility functions, which can be useful everywhere within i3 (see
* also libi3).
*
*/
#pragma once
#include <config.h>
#include <err.h>
#include "data.h"
#define die(...) errx(EXIT_FAILURE, __VA_ARGS__);
#define exit_if_null(pointer, ...) \
{ \
if (pointer == NULL) \
die(__VA_ARGS__); \
}
#define STARTS_WITH(string, needle) (strncasecmp((string), (needle), strlen((needle))) == 0)
#define CIRCLEQ_NEXT_OR_NULL(head, elm, field) (CIRCLEQ_NEXT(elm, field) != CIRCLEQ_END(head) ? CIRCLEQ_NEXT(elm, field) : NULL)
#define CIRCLEQ_PREV_OR_NULL(head, elm, field) (CIRCLEQ_PREV(elm, field) != CIRCLEQ_END(head) ? CIRCLEQ_PREV(elm, field) : NULL)
#define NODES_FOREACH(head) \
for (Con *child = (Con *)-1; (child == (Con *)-1) && ((child = 0), true);) \
TAILQ_FOREACH(child, &((head)->nodes_head), nodes)
#define NODES_FOREACH_REVERSE(head) \
for (Con *child = (Con *)-1; (child == (Con *)-1) && ((child = 0), true);) \
TAILQ_FOREACH_REVERSE(child, &((head)->nodes_head), nodes_head, nodes)
/* greps the ->nodes of the given head and returns the first node that matches the given condition */
#define GREP_FIRST(dest, head, condition) \
NODES_FOREACH(head) { \
if (!(condition)) \
continue; \
\
(dest) = child; \
break; \
}
#define FREE(pointer) \
do { \
free(pointer); \
pointer = NULL; \
} while (0)
#define CALL(obj, member, ...) obj->member(obj, ##__VA_ARGS__)
#define SWAP(first, second, type) \
do { \
type tmp_SWAP = first; \
first = second; \
second = tmp_SWAP; \
} while (0)
int min(int a, int b);
int max(int a, int b);
bool rect_contains(Rect rect, uint32_t x, uint32_t y);
Rect rect_add(Rect a, Rect b);
Rect rect_sub(Rect a, Rect b);
/**
* Returns true if the name consists of only digits.
*
*/
__attribute__((pure)) bool name_is_digits(const char *name);
/**
* Set 'out' to the layout_t value for the given layout. The function
* returns true on success or false if the passed string is not a valid
* layout name.
*
*/
bool layout_from_name(const char *layout_str, layout_t *out);
/**
* Parses the workspace name as a number. Returns -1 if the workspace should be
* interpreted as a "named workspace".
*
*/
long ws_name_to_number(const char *name);
/**
* Updates *destination with new_value and returns true if it was changed or false
* if it was the same
*
*/
bool update_if_necessary(uint32_t *destination, const uint32_t new_value);
/**
* exec()s an i3 utility, for example the config file migration script or
* i3-nagbar. This function first searches $PATH for the given utility named,
* then falls back to the dirname() of the i3 executable path and then falls
* back to the dirname() of the target of /proc/self/exe (on linux).
*
* This function should be called after fork()ing.
*
* The first argument of the given argv vector will be overwritten with the
* executable name, so pass NULL.
*
* If the utility cannot be found in any of these locations, it exits with
* return code 2.
*
*/
void exec_i3_utility(char *name, char *argv[]);
/**
* Checks if the given path exists by calling stat().
*
*/
bool path_exists(const char *path);
/**
* Restart i3 in-place
* appends -a to argument list to disable autostart
*
*/
void i3_restart(bool forget_layout);
#if defined(__OpenBSD__) || defined(__APPLE__)
/**
* Taken from FreeBSD
* Find the first occurrence of the byte string s in byte string l.
*
*/
void *memmem(const void *l, size_t l_len, const void *s, size_t s_len);
#endif
/**
* Escapes the given string if a pango font is currently used.
* If the string has to be escaped, the input string will be free'd.
*
*/
char *pango_escape_markup(char *input);
/**
* Starts an i3-nagbar instance with the given parameters. Takes care of
* handling SIGCHLD and killing i3-nagbar when i3 exits.
*
* The resulting PID will be stored in *nagbar_pid and can be used with
* kill_nagbar() to kill the bar later on.
*
*/
void start_nagbar(pid_t *nagbar_pid, char *argv[]);
/**
* Kills the i3-nagbar process, if *nagbar_pid != -1.
*
* If wait_for_it is set (restarting i3), this function will waitpid(),
* otherwise, ev is assumed to handle it (reloading).
*
*/
void kill_nagbar(pid_t *nagbar_pid, bool wait_for_it);
/**
* Converts a string into a long using strtol().
* This is a convenience wrapper checking the parsing result. It returns true
* if the number could be parsed.
*/
bool parse_long(const char *str, long *out, int base);
/**
* Slurp reads path in its entirety into buf, returning the length of the file
* or -1 if the file could not be read. buf is set to a buffer of appropriate
* size, or NULL if -1 is returned.
*
*/
ssize_t slurp(const char *path, char **buf);
/**
* Convert a direction to its corresponding orientation.
*
*/
orientation_t orientation_from_direction(direction_t direction);
| 2,169 |
831 | import demo_long
# Testing a proper __module__, but no useful __qualname__ attribute.
# NOTE(review): documentation-tool fixture; what DataDemo.__init__ resolves to
# (and hence its __qualname__) is defined in demo_long - confirm there.
bad_qualname = demo_long.DataDemo.__init__
def new_union(a: int | dict[str, "Foo"]) -> bool | None:
"""Testing Python 3.10's new type union syntax."""""
class Foo:
    """Target of the forward reference (the string \"Foo\") in new_union's annotation."""

    pass
| 95 |
524 | <gh_stars>100-1000
package com.github.simonpercic.oklogexamplejava.data.api.okhttp3;
import com.github.simonpercic.oklog3.OkLogInterceptor;
import java.util.Set;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
import dagger.multibindings.IntoSet;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.logging.HttpLoggingInterceptor;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
* @author <NAME> <a href="https://github.com/simonpercic">https://github.com/simonpercic</a>
*/
@Module
public class DataModule3 {
    // Apiary mock backend used by the sample app.
    private static final String ENDPOINT = "http://private-b7bc4-tvshowsapi.apiary-mock.com";

    /**
     * Builds the shared OkHttp client, wiring in every multibound
     * {@link Interceptor} contributed below (HTTP logging and OkLog).
     */
    @Provides
    @Singleton
    public OkHttpClient provideOkHttpClient(Set<Interceptor> interceptors) {
        OkHttpClient.Builder okHttpBuilder = new OkHttpClient.Builder();

        // defensive: tolerate an absent/empty interceptor set
        if (interceptors != null && interceptors.size() > 0) {
            for (Interceptor interceptor : interceptors) {
                okHttpBuilder.addInterceptor(interceptor);
            }
        }

        return okHttpBuilder.build();
    }

    /** Retrofit facade for the mock TV-shows API, backed by the client above. */
    @Provides
    @Singleton
    public RestApi3 provideRestApi(OkHttpClient okHttpClient) {
        return new Retrofit.Builder()
                .baseUrl(ENDPOINT)
                .addConverterFactory(GsonConverterFactory.create())
                .client(okHttpClient)
                .build()
                .create(RestApi3.class);
    }

    /** Contributes OkHttp's body-level HTTP logging interceptor into the set. */
    @Provides
    @IntoSet
    @Singleton
    public Interceptor provideHttpLoggingInterceptor() {
        HttpLoggingInterceptor httpLoggingInterceptor = new HttpLoggingInterceptor();
        httpLoggingInterceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
        return httpLoggingInterceptor;
    }

    /** Contributes the OkLog interceptor (default configuration) into the set. */
    @Provides
    @IntoSet
    @Singleton
    public Interceptor provideOkLogInterceptor() {
        return OkLogInterceptor.builder().build();
    }
}
| 790 |
778 | <gh_stars>100-1000
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME>
//
//
#if !defined(KRATOS_STRUCTURED_MESH_REFINEMENT_H_INCLUDED )
#define KRATOS_STRUCTURED_MESH_REFINEMENT_H_INCLUDED
// System includes
#include <string>
#include <iostream>
// External includes
// Project includes
#include "includes/define.h"
#include "modeler/modeler.h"
#include "spatial_containers/spatial_containers.h"
namespace Kratos
{
///@name Kratos Globals
///@{
///@}
///@name Type Definitions
///@{
///@}
///@name Enum's
///@{
///@}
///@name Functions
///@{
///@}
///@name Kratos Classes
///@{
/// Short class definition.
/** Modeler that refines structured meshes.
 *
 * Every element/condition carrying a STRUCTURED_MESH_DIVISIONS vector and
 * needing more nodes than its geometry already has is replaced by a regular
 * grid of new elements/conditions; intermediate nodes are created by
 * interpolating position, solution-step data and dof fixity from the original
 * geometry. Only hexahedra (elements) and quadrilaterals (conditions) are
 * produced by the generation routines below.
 */
class StructuredMeshRefinementModeler : public Modeler
{
public:
    ///@name Type Definitions
    ///@{
    /// Pointer definition of StructuredMeshRefinementModeler
    KRATOS_CLASS_POINTER_DEFINITION(StructuredMeshRefinementModeler);
    typedef Modeler BaseType;
    typedef Node<3> NodeType;
    typedef PointerVector<NodeType> NodesVectorType;
    typedef std::size_t SizeType;
    ///@}
    ///@name Life Cycle
    ///@{
    /// Default constructor.
    StructuredMeshRefinementModeler() {}
    /// Destructor.
    virtual ~StructuredMeshRefinementModeler() {}
    ///@}
    ///@name Operators
    ///@{
    ///@}
    ///@name Operations
    ///@{
    /**
     * Rebuilds the model part: refinable elements/conditions are swapped out,
     * refined into grids, and re-added; the rest are re-added unchanged (with
     * a fresh id). New nodes that coincide (within 1e-9) with existing nodes
     * are collapsed onto the existing node via a spatial bins search.
     * NOTE(review): rReferenceElement is currently unused in this body.
     */
    void GenerateMesh(ModelPart& rThisModelPart, Element const& rReferenceElement)
    {
        // Take ownership of the current elements/conditions; the model part's
        // containers are left empty and refilled below.
        ModelPart::ElementsContainerType old_elements;
        old_elements.swap(rThisModelPart.Elements());
        ModelPart::ConditionsContainerType old_conditions;
        old_conditions.swap(rThisModelPart.Conditions());
        // Geometric tolerance used to decide that two nodes coincide.
        const double tolerance = 1e-9;
        // Creating a bins for searching the nodes to be collapsed
        //typedef Bucket<3, NodeType, ModelPart::NodesContainerType::ContainerType> bucket_type;
        typedef BinsDynamic<3, NodeType, ModelPart::NodesContainerType::ContainerType> bins_type;
        bins_type nodes_bins(rThisModelPart.Nodes().ptr_begin(), rThisModelPart.Nodes().ptr_end());
        // One-slot buffer receiving the (single) coincident node found.
        ModelPart::NodesContainerType::ContainerType founded_nodes(1);
        for(ModelPart::ElementIterator i_element = old_elements.begin() ; i_element != old_elements.end() ; i_element++)
        {
            // Getting the number of divisions;
            vector<int>& number_of_divisions = i_element->GetValue(STRUCTURED_MESH_DIVISIONS);
            //int node_id = rThisModelPart.Nodes().size() + 1;
            int node_id = rThisModelPart.Nodes().back().Id() + 1;
            int start_element_id = 1;
            if(rThisModelPart.Elements().size() != 0)
                start_element_id = rThisModelPart.Elements().back().Id() + 1;
            // Calculating the number of nodes after refining this element
            SizeType nodes_vector_size = number_of_divisions[0] + 1;
            for(SizeType i = 1 ; i < number_of_divisions.size() ; i++)
                nodes_vector_size *= number_of_divisions[i] + 1;
            if(nodes_vector_size > i_element->GetGeometry().size()) // if we need to create more nodes than actual we have to refine
            {
                NodesVectorType nodes_vector;
                NodesVectorType surface_nodes_vector;
                GenerateNodes(*i_element, nodes_vector, surface_nodes_vector, number_of_divisions);
                for(SizeType i = 0 ; i < nodes_vector.size() ; i++)
                {
                    NodeType::Pointer p_node = nodes_vector(i);
                    if(nodes_bins.SearchInRadius(*p_node, tolerance, founded_nodes.begin(), 1) > 0) // a node already exists within tolerance: collapse onto it
                    {
                        nodes_vector(i) = founded_nodes[0];
                    }
                    else
                    {
                        // Genuinely new node: register it in the model part
                        // and in the bins so later duplicates collapse onto it.
                        p_node->SetId(node_id++);
                        rThisModelPart.AddNode(p_node);
                        nodes_bins.AddPoint(p_node);
                    }
                }
                GenerateElements(rThisModelPart, *i_element, nodes_vector, surface_nodes_vector, number_of_divisions, start_element_id);
            }
            else
            {
                // Already fine enough: re-add the original element with a new id.
                i_element->SetId(start_element_id);
                rThisModelPart.Elements().push_back(*(i_element.base()));
            }
        }
        for(ModelPart::ConditionIterator i_condition = old_conditions.begin() ; i_condition != old_conditions.end() ; i_condition++)
        {
            // Getting the number of divisions;
            vector<int>& number_of_divisions = i_condition->GetValue(STRUCTURED_MESH_DIVISIONS);
            // NOTE(review): unlike the element loop above (which uses
            // back().Id() + 1), ids here start at container size + 1; if
            // existing ids are not contiguous this may collide — confirm.
            int node_id = rThisModelPart.Nodes().size() + 1;
            int start_condition_id = rThisModelPart.Conditions().size() + 1;
            // Calculating the number of nodes after refining this condition
            SizeType nodes_vector_size = number_of_divisions[0] + 1;
            for(SizeType i = 1 ; i < i_condition->GetGeometry().LocalSpaceDimension() ; i++)
                nodes_vector_size *= number_of_divisions[i] + 1;
            if(nodes_vector_size > i_condition->GetGeometry().size()) // if we need to create more nodes than actual we have to refine
            {
                NodesVectorType nodes_vector;
                NodesVectorType surface_nodes_vector;
                GenerateNodes(*i_condition, nodes_vector, surface_nodes_vector, number_of_divisions);
                for(SizeType i = 0 ; i < nodes_vector.size() ; i++)
                {
                    NodeType::Pointer p_node = nodes_vector(i);
                    if(nodes_bins.SearchInRadius(*p_node, tolerance, founded_nodes.begin(), 1) > 0) // a node already exists within tolerance: collapse onto it
                    {
                        nodes_vector(i) = founded_nodes[0];
                    }
                    else
                    {
                        p_node->SetId(node_id++);
                        rThisModelPart.AddNode(p_node);
                        nodes_bins.AddPoint(p_node);
                    }
                }
                GenerateConditions(rThisModelPart, *i_condition, nodes_vector, surface_nodes_vector, number_of_divisions, start_condition_id);
            }
            else
            {
                i_condition->SetId(start_condition_id);
                rThisModelPart.Conditions().push_back(*(i_condition.base()));
            }
        }
    }
    /**
     * Creates the lattice of refined nodes for one element/condition by
     * sampling its geometry on a regular grid in local coordinates [-1,1]^d
     * (d = local space dimension, 2 or 3); each sample is appended to
     * VolumeNodesVector by GenerateNode.
     * NOTE(review): SurfaceNodesVector is passed through but never filled;
     * local variable global_coordinates below is unused.
     */
    template<class ComponentType>
    void GenerateNodes(ComponentType& rThisComponent, NodesVectorType& VolumeNodesVector, NodesVectorType& SurfaceNodesVector, vector<int>& number_of_divisions)
    {
        SizeType local_dimension = rThisComponent.GetGeometry().LocalSpaceDimension();
        Element::GeometryType::CoordinatesArrayType local_coordinates(local_dimension);
        Element::GeometryType::CoordinatesArrayType global_coordinates;
        if(local_dimension == 2)
        {
            for(int i = 0 ; i <= number_of_divisions[0] ; i++)
            {
                local_coordinates[0] = double(-1.00) + double(2.00 * i) / number_of_divisions[0];
                for(int j = 0 ; j <= number_of_divisions[1] ; j++)
                {
                    local_coordinates[1] = double(-1.00) + double(2.00 * j) / number_of_divisions[1];
                    GenerateNode(rThisComponent, VolumeNodesVector, SurfaceNodesVector, local_coordinates);
                }
            }
        }
        if(local_dimension == 3)
        {
            for(int i = 0 ; i <= number_of_divisions[0] ; i++)
            {
                local_coordinates[0] = double(-1.00) + double(2.00 * i) / number_of_divisions[0];
                for(int j = 0 ; j <= number_of_divisions[1] ; j++)
                {
                    local_coordinates[1] = double(-1.00) + double(2.00 * j) / number_of_divisions[1];
                    for(int k = 0 ; k <= number_of_divisions[2] ; k++)
                    {
                        local_coordinates[2] = double(-1.00) + double(2.00 * k) / number_of_divisions[2];
                        GenerateNode(rThisComponent, VolumeNodesVector, SurfaceNodesVector, local_coordinates);
                    }
                }
            }
        }
    }
    /**
     * Creates one node at the given local coordinates of the component's
     * geometry: global position, solution-step data and dof fixity are all
     * interpolated from the geometry's nodes via the shape function values.
     * A dof is fixed on the new node only when the interpolated fixity is
     * numerically 1 (i.e. every contributing node has it fixed).
     * NOTE(review): local_dimension and number_of_dofs below are unused;
     * nodal_data is allocated with new[] and handed to the Node constructor —
     * presumably ownership transfers there, otherwise this leaks. Confirm.
     */
    template<class ComponentType>
    void GenerateNode(ComponentType& rThisComponent, NodesVectorType& VolumeNodesVector, NodesVectorType& SurfaceNodesVector, Element::GeometryType::CoordinatesArrayType& rLocalCoordinates)
    {
        SizeType local_dimension = rThisComponent.GetGeometry().LocalSpaceDimension();
        typename ComponentType::GeometryType::CoordinatesArrayType global_coordinates;
        typename ComponentType::GeometryType& r_geometry = rThisComponent.GetGeometry();
        const SizeType components_nodes_number = r_geometry.size();
        Vector shape_functions_values(components_nodes_number);
        // Getting the shape function value for given local coordinates
        for(SizeType h = 0 ; h < components_nodes_number ; h++)
            shape_functions_values[h] = r_geometry.ShapeFunctionValue(h, rLocalCoordinates);
        // Calculating the GlobalCoordinates
        r_geometry.GlobalCoordinates(global_coordinates, rLocalCoordinates);
        // Interpolating the Nodal data
        SizeType ndoal_data_size = r_geometry[0].SolutionStepData().TotalDataSize() / sizeof(double);
        typedef VariablesListDataValueContainer::BlockType block_type;
        block_type* nodal_data = new block_type[ndoal_data_size];
        // Initializing to zero
        for(SizeType i = 0 ; i < ndoal_data_size ; i++)
            nodal_data[i] = block_type();
        // Weighted sum of each source node's solution-step block.
        for(SizeType i = 0 ; i < components_nodes_number ; i++)
        {
            block_type* p_data = r_geometry[i].SolutionStepData().Data();
            for(SizeType j = 0 ; j < ndoal_data_size ; j++)
            {
                nodal_data[j] += shape_functions_values[i] * p_data[j];
            }
        }
        // Creating the new node (id 0 here; the real id is assigned by the caller)
        NodeType::Pointer p_node(new NodeType(0, global_coordinates[0], global_coordinates[1], global_coordinates[2], r_geometry[0].pGetVariablesList(), nodal_data, r_geometry[0].GetBufferSize()));
        SizeType number_of_dofs = r_geometry[0].GetDofs().size();
        for(NodeType::DofsContainerType::iterator i_dof = r_geometry[0].GetDofs().begin() ; i_dof != r_geometry[0].GetDofs().end() ; i_dof++)
        {
            VariableData const& r_dof_variable = i_dof->GetVariable();
            double dof_is_fixed = double();
            for(SizeType i = 0 ; i < components_nodes_number ; i++)
            {
                dof_is_fixed += shape_functions_values[i] * static_cast<double>(r_geometry[i].IsFixed(r_dof_variable));
            }
            if(dof_is_fixed > 0.999999)
            {
                p_node->pAddDof(*i_dof)->FixDof();
            }
        }
        VolumeNodesVector.push_back(p_node);
    }
    /**
     * Builds the refined hexahedral elements from the node lattice created by
     * GenerateNodes (3D only). Element ids start at StartElementId + local_id.
     * NOTE(review): nodes_number_i is unused in this body.
     */
    void GenerateElements(ModelPart& rThisModelPart, Element& rThisElement, NodesVectorType& VolumeNodesVector, NodesVectorType& SurfaceNodesVector, vector<int>& number_of_divisions, SizeType StartElementId)
    {
        SizeType local_dimension = rThisElement.GetGeometry().LocalSpaceDimension();
        Element::NodesArrayType element_nodes(rThisElement.GetGeometry().size());
        SizeType nodes_number_i = number_of_divisions[0] + 1; // Number of nodes in i direction
        SizeType nodes_number_j = number_of_divisions[1] + 1; // Number of nodes in j direction
        SizeType nodes_number_k = number_of_divisions[2] + 1; // Number of nodes in k direction
        if(local_dimension == 3)
        {
            for(int i = 0 ; i < number_of_divisions[0] ; i++)
            {
                for(int j = 0 ; j < number_of_divisions[1] ; j++)
                {
                    for(int k = 0 ; k < number_of_divisions[2] ; k++)
                    {
                        // This is done only for Hexahedra
                        // local_id indexes the lattice node at (i, j, k).
                        SizeType local_id = i * nodes_number_k * nodes_number_j + j * nodes_number_k + k;
                        element_nodes(0) = VolumeNodesVector(local_id);
                        element_nodes(1) = VolumeNodesVector(local_id + nodes_number_j * nodes_number_k);
                        element_nodes(2) = VolumeNodesVector(local_id + nodes_number_j * nodes_number_k + nodes_number_k);
                        element_nodes(3) = VolumeNodesVector(local_id + nodes_number_k);
                        element_nodes(4) = VolumeNodesVector(local_id + 1);
                        element_nodes(5) = VolumeNodesVector(local_id + nodes_number_j * nodes_number_k + 1);
                        element_nodes(6) = VolumeNodesVector(local_id + nodes_number_j * nodes_number_k + nodes_number_k + 1);
                        element_nodes(7) = VolumeNodesVector(local_id + nodes_number_k + 1);
                        rThisModelPart.Elements().push_back(rThisElement.Create(local_id + StartElementId, element_nodes, rThisElement.pGetProperties()));
                    }
                }
            }
        }
    }
    /**
     * Builds the refined quadrilateral conditions from the node lattice
     * created by GenerateNodes (2D only). Condition ids start at
     * StartConditionId + local_id.
     * NOTE(review): nodes_number_i is unused in this body.
     */
    void GenerateConditions(ModelPart& rThisModelPart, Condition& rThisCondition, NodesVectorType& VolumeNodesVector, NodesVectorType& SurfaceNodesVector, vector<int>& number_of_divisions, SizeType StartConditionId)
    {
        SizeType local_dimension = rThisCondition.GetGeometry().LocalSpaceDimension();
        Condition::NodesArrayType condition_nodes(rThisCondition.GetGeometry().size());
        SizeType nodes_number_i = number_of_divisions[0] + 1; // Number of nodes in i direction
        SizeType nodes_number_j = number_of_divisions[1] + 1; // Number of nodes in j direction
        if(local_dimension == 2)
        {
            for(int i = 0 ; i < number_of_divisions[0] ; i++)
            {
                for(int j = 0 ; j < number_of_divisions[1] ; j++)
                {
                    // This is done only for quadrilateral
                    SizeType local_id = i * nodes_number_j + j;
                    condition_nodes(0) = VolumeNodesVector(local_id);
                    condition_nodes(1) = VolumeNodesVector(local_id + nodes_number_j);
                    condition_nodes(2) = VolumeNodesVector(local_id + nodes_number_j + 1);
                    condition_nodes(3) = VolumeNodesVector(local_id + 1);
                    rThisModelPart.Conditions().push_back(rThisCondition.Create(local_id + StartConditionId, condition_nodes, rThisCondition.pGetProperties()));
                }
            }
        }
    }
    // No-op overload. NOTE(review): purpose unclear — possibly a leftover stub.
    void GenerateNodes(ModelPart& ThisModelPart, SizeType NumberOfSegments)
    {
    }
    /*
            void Interpolate(Element& rThisElement
                ModelPart::ElementsContainerType::iterator el_it,
                const array_1d<double,3>& N,
                int step_data_size,
                Node<3>::Pointer pnode)
        {
            //Geometry element of the rOrigin_ModelPart
            Geometry< Node<3> >& geom = el_it->GetGeometry();
            unsigned int buffer_size = pnode->GetBufferSize();
            for(unsigned int step = 0; step<buffer_size; step++)
            {
                //getting the data of the solution step
                double* step_data = (pnode)->SolutionStepData().Data(step);
                double* node0_data = geom[0].SolutionStepData().Data(step);
                double* node1_data = geom[1].SolutionStepData().Data(step);
                double* node2_data = geom[2].SolutionStepData().Data(step);
                //copying this data in the position of the vector we are interested in
                for(int j= 0; j< step_data_size; j++)
                {
                    step_data[j] = N[0]*node0_data[j] + N[1]*node1_data[j] + N[2]*node2_data[j];
                }
            }
        }
    */
    ///@}
    ///@name Access
    ///@{
    ///@}
    ///@name Inquiry
    ///@{
    ///@}
    ///@name Input and output
    ///@{
    /// Turn back information as a string.
    virtual std::string Info() const
    {
        return "StructuredMeshRefinementModeler";
    }
    /// Print information about this object.
    virtual void PrintInfo(std::ostream& rOStream) const
    {
        rOStream << Info();
    }
    /// Print object's data.
    virtual void PrintData(std::ostream& rOStream) const
    {
    }
    ///@}
    ///@name Friends
    ///@{
    ///@}
protected:
    ///@name Protected static Member Variables
    ///@{
    ///@}
    ///@name Protected member Variables
    ///@{
    ///@}
    ///@name Protected Operators
    ///@{
    ///@}
    ///@name Protected Operations
    ///@{
    ///@}
    ///@name Protected Access
    ///@{
    ///@}
    ///@name Protected Inquiry
    ///@{
    ///@}
    ///@name Protected LifeCycle
    ///@{
    ///@}
private:
    ///@name Static Member Variables
    ///@{
    ///@}
    ///@name Member Variables
    ///@{
    ///@}
    ///@name Private Operators
    ///@{
    ///@}
    ///@name Private Operations
    ///@{
    /**
     * Creates interior nodes along the segment between the first two nodes of
     * rGeometry, splitting it into NumberOfSegments pieces.
     * NOTE(review): the loop runs i = 1 .. NumberOfSegments-2 and therefore
     * creates NumberOfSegments-2 interior nodes; NumberOfSegments segments
     * would need NumberOfSegments-1 interior nodes (i < NumberOfSegments) —
     * confirm whether this off-by-one is intentional.
     */
    void GenerateNodes(ModelPart& ThisModelPart, GeometryType& rGeometry, SizeType NumberOfSegments, SizeType StartNodeId)
    {
        double x1 = rGeometry[0][0];
        double y1 = rGeometry[0][1];
        double z1 = rGeometry[0][2];
        double x2 = rGeometry[1][0];
        double y2 = rGeometry[1][1];
        double z2 = rGeometry[1][2];
        double dx = (x2 - x1) / NumberOfSegments;
        double dy = (y2 - y1) / NumberOfSegments;
        double dz = (z2 - z1) / NumberOfSegments;
        for(SizeType i = 1 ; i < NumberOfSegments - 1 ; i++)
        {
            ThisModelPart.CreateNewNode(StartNodeId++, x1 + i * dx, y1 + i * dy, z1 + i * dz);
        }
    }
    ///@}
    ///@name Private Access
    ///@{
    ///@}
    ///@name Private Inquiry
    ///@{
    ///@}
    ///@name Un accessible methods
    ///@{
    /// Assignment operator.
    StructuredMeshRefinementModeler& operator=(StructuredMeshRefinementModeler const& rOther);
    /// Copy constructor.
    StructuredMeshRefinementModeler(StructuredMeshRefinementModeler const& rOther);
    ///@}
}; // Class StructuredMeshRefinementModeler
///@}
///@name Type Definitions
///@{
///@}
///@name Input and output
///@{
/// input stream function
inline std::istream& operator >> (std::istream& rIStream,
StructuredMeshRefinementModeler& rThis);
/// output stream function
/** Streams Info(), a newline (flushed), then PrintData() of the modeler. */
inline std::ostream& operator << (std::ostream& rOStream,
                                  const StructuredMeshRefinementModeler& rThis)
{
    rThis.PrintInfo(rOStream);
    rOStream << std::endl;
    rThis.PrintData(rOStream);
    return rOStream;
}
///@}
} // namespace Kratos.
#endif // KRATOS_STRUCTURED_MESH_REFINEMENT_H_INCLUDED defined
| 8,763 |
359 | /*
Copyright 2018 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#import <Foundation/Foundation.h>
#import "MXJSONModel.h"
FOUNDATION_EXPORT NSString *const kMXServerNoticeTypeUsageLimitReached;
/**
 * Content of a m.server_notice event message.
 */
@interface MXServerNoticeContent : MXJSONModel
/**
 The kind of user limit; generally monthly_active_user.
 */
@property (nonatomic, readonly ) NSString *limitType;
/**
 A URI to contact the homeserver administrator.
 */
@property (nonatomic, readonly ) NSString *adminContact;
/**
 The notice type.
 Like kMXServerNoticeTypeUsageLimitReached.
 */
@property (nonatomic, readonly ) NSString *serverNoticeType;
/**
 Indicates whether this is a usage-limit-reached notice.
 */
@property (nonatomic, readonly ) BOOL isServerNoticeUsageLimit;
@end
| 353 |
449 | <filename>Chapter4/ch4_turtle_trading.py<gh_stars>100-1000
#!/bin/python3
import pandas as pd
import numpy as np
from pandas_datareader import data
import matplotlib.pyplot as plt
def load_financial_data(start_date, end_date, output_file):
    """Return GOOG daily price data, using a local pickle as a cache.

    Tries to read ``output_file`` first; when it does not exist, the data is
    downloaded from Yahoo via pandas-datareader and pickled for next time.
    """
    try:
        frame = pd.read_pickle(output_file)
        print('File data found...reading GOOG data')
    except FileNotFoundError:
        print('File not found...downloading the GOOG data')
        frame = data.DataReader('GOOG', 'yahoo', start_date, end_date)
        frame.to_pickle(output_file)
    return frame
# Download (or load the cached pickle of) GOOG daily prices for 2001-2018.
goog_data=load_financial_data(start_date='2001-01-01',
                    end_date = '2018-01-01',
                    output_file='goog_data.pkl')
def turtle_trading(financial_data, window_size):
    """Build turtle-trading entry/exit signals for a price series.

    Entry: price breaks above the rolling ``window_size``-day high (long) or
    below the rolling low (short). Exit: price crosses back through the
    rolling mean. At most one unit of inventory is held at a time.

    :param financial_data: DataFrame with an 'Adj Close' column.
    :param window_size: look-back length in rows (trading days).
    :return: DataFrame indexed like ``financial_data`` with columns
        ``orders`` (+1 buy, -1 sell, 0 hold), ``high``, ``low``, ``avg`` and
        the four boolean entry/exit rule columns.
    """
    signals = pd.DataFrame(index=financial_data.index)
    signals['orders'] = 0
    # Rolling statistics are shifted one row so that today's decision only
    # uses information available up to yesterday's close.
    past_close = financial_data['Adj Close'].shift(1)
    # window_size-days high
    signals['high'] = past_close.rolling(window=window_size).max()
    # window_size-days low
    signals['low'] = past_close.rolling(window=window_size).min()
    # window_size-days mean
    signals['avg'] = past_close.rolling(window=window_size).mean()
    # entry rule : stock price > the highest value for window_size days
    #              stock price < the lowest value for window_size days
    signals['long_entry'] = financial_data['Adj Close'] > signals.high
    signals['short_entry'] = financial_data['Adj Close'] < signals.low
    # exit rule : the stock price crosses the mean of past window_size days.
    signals['long_exit'] = financial_data['Adj Close'] < signals.avg
    signals['short_exit'] = financial_data['Adj Close'] > signals.avg
    position = 0  # current inventory: +1 long, -1 short, 0 flat
    orders = signals['orders'].values
    for k in range(len(signals)):
        if signals['long_entry'].iloc[k] and position == 0:
            orders[k] = 1
            position = 1
        elif signals['short_entry'].iloc[k] and position == 0:
            orders[k] = -1
            position = -1
        elif signals['short_exit'].iloc[k] and position > 0:
            orders[k] = -1
            position = 0
        elif signals['long_exit'].iloc[k] and position < 0:
            orders[k] = 1
            position = 0
        else:
            orders[k] = 0
    return signals
# Run the strategy over the downloaded data and plot price, the rolling
# high/low/mean bands and the buy (^) / sell (v) markers.
ts=turtle_trading(goog_data, 50)
fig = plt.figure()
ax1 = fig.add_subplot(111, ylabel='Google price in $')
goog_data["Adj Close"].plot(ax=ax1, color='g', lw=.5)
ts["high"].plot(ax=ax1, color='g', lw=.5)
ts["low"].plot(ax=ax1, color='r', lw=.5)
ts["avg"].plot(ax=ax1, color='b', lw=.5)
ax1.plot(ts.loc[ts.orders== 1.0].index,
         goog_data["Adj Close"][ts.orders == 1.0],
         '^', markersize=7, color='k')
ax1.plot(ts.loc[ts.orders== -1.0].index,
         goog_data["Adj Close"][ts.orders == -1.0],
         'v', markersize=7, color='k')
#
# ax1.plot(ts.loc[ts.long_entry== True].index,
#          goog_data["Adj Close"][ts.long_entry== True],
#          '^', markersize=7, color='k')
#
# ax1.plot(ts.loc[ts.short_entry== True].index,
#          goog_data["Adj Close"][ts.short_entry== True],
#          'v', markersize=7, color='k')
#
# ax1.plot(ts.loc[ts.long_exit == True].index,
#          goog_data["Adj Close"][ts.long_exit == True],
#          'v', markersize=7, color='k')
#
# ax1.plot(ts.loc[ts.short_exit == True].index,
#          goog_data["Adj Close"][ts.short_exit == True],
#          'v', markersize=7, color='k')
plt.legend(["Price","Highs","Lows","Average","Buy","Sell"])
plt.title("Turtle Trading Strategy")
plt.show()
import sys
sys.exit(0)
# NOTE(review): everything below this exit is unreachable and references
# names never defined in this script (`signals['signal']`, `financial_data`)
# — it appears to be leftover backtesting code from another example.
# You are going to set your initial amount of money you want
# to invest --- here it is 10,000
initial_capital = float(10000.0)
# You are going to create a new dataframe positions
# Remember the index is still the same as signals
positions = pd.DataFrame(index=signals.index).fillna(0.0)
# You are going to buy 10 shares of MSFT when signal is 1
# You are going to sell 10 shares of MSFT when signal is -1
# You will assign these values to the column MSFT of the
# dataframe positions
positions['MSFT'] = 10 * signals['signal']
# You are now going to calculate the notional (quantity x price)
# for your portfolio. You will multiply Adj Close from
# the dataframe containing prices and the positions (10 shares)
# You will store it into the variable portfolio
portfolio = positions.multiply(financial_data['Adj Close'], axis=0)
# Add `holdings` to portfolio
portfolio['holdings'] = (positions.multiply(financial_data['Adj Close'], axis=0)).sum(axis=1)
# You will store positions.diff into pos_diff
pos_diff = positions.diff()
# You will now add a column cash in your dataframe portfolio
# which will calculate the amount of cash you have
# initial_capital - (the notional you use for your different buy/sell)
portfolio['cash'] = initial_capital - (pos_diff.multiply(financial_data['Adj Close'], axis=0)).sum(axis=1).cumsum()
# You will now add a column total to your portfolio calculating the part of holding
# and the part of cash
portfolio['total'] = portfolio['cash'] + portfolio['holdings']
# Add `returns` to portfolio
portfolio['returns'] = portfolio['total'].pct_change()
# Print the first lines of `portfolio`
print(portfolio)
| 2,102 |
574 | <filename>test/tst_compoundvar.py
import sys
import unittest
import os
import tempfile
from netCDF4 import Dataset, CompoundType
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
# test compound data types.
# Scratch netCDF file plus the names used for the dimension, group,
# variable and the five compound types created in the test.
FILE_NAME = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
DIM_NAME = 'phony_dim'
GROUP_NAME = 'phony_group'
VAR_NAME = 'phony_compound_var'
TYPE_NAME1 = 'cmp1'
TYPE_NAME2 = 'cmp2'
TYPE_NAME3 = 'cmp3'
TYPE_NAME4 = 'cmp4'
TYPE_NAME5 = 'cmp5'
DIM_SIZE=3
# unaligned data types (note they are nested)
dtype1=np.dtype([('i', 'i2'), ('j', 'i8')])
dtype2=np.dtype([('x', 'f4',), ('y', 'f8',(3,2))])
dtype3=np.dtype([('xx', dtype1), ('yy', dtype2)])
dtype4=np.dtype([('xxx',dtype3),('yyy','f8', (4,))])
dtype5=np.dtype([('x1', dtype1), ('y1', dtype2)])
# aligned data types (same layout but with C struct alignment/padding) —
# what the library is expected to hand back on read.
dtype1a = np.dtype({'names':['i','j'],'formats':['<i2','<i8']},align=True)
dtype2a = np.dtype({'names':['x','y'],'formats':['<f4',('<f8', (3, 2))]},align=True)
dtype3a = np.dtype({'names':['xx','yy'],'formats':[dtype1a,dtype2a]},align=True)
dtype4a = np.dtype({'names':['xxx','yyy'],'formats':[dtype3a,('f8', (4,))]},align=True)
dtype5a = np.dtype({'names':['x1','y1'],'formats':[dtype1a,dtype2a]},align=True)
# Sample payload written to the root-group variable (dtype4) ...
data = np.zeros(DIM_SIZE,dtype4)
data['xxx']['xx']['i']=1
data['xxx']['xx']['j']=2
data['xxx']['yy']['x']=3
data['xxx']['yy']['y']=4
data['yyy'] = 5
# ... and to the sub-group variable (dtype5).
datag = np.zeros(DIM_SIZE,dtype5)
datag['x1']['i']=10
datag['x1']['j']=20
datag['y1']['x']=30
datag['y1']['y']=40
# Round-trips multiply nested compound types through a netCDF file and checks
# that both the data and the (aligned) dtypes survive.
class VariablesTestCase(unittest.TestCase):
    def setUp(self):
        """Create the netCDF file with nested compound variables and verify
        the data reads back correctly before the file is closed."""
        self.file = FILE_NAME
        f = Dataset(self.file, 'w')
        d = f.createDimension(DIM_NAME,DIM_SIZE)
        g = f.createGroup(GROUP_NAME)
        # simple compound types.
        cmptype1 = f.createCompoundType(dtype1, TYPE_NAME1)
        cmptype2 = f.createCompoundType(dtype2, TYPE_NAME2)
        # close and reopen the file to make sure compound
        # type info read back in correctly.
        f.close()
        f = Dataset(self.file,'r+')
        g = f.groups[GROUP_NAME]
        # multiply nested compound types
        cmptype3 = f.createCompoundType(dtype3, TYPE_NAME3)
        cmptype4 = f.createCompoundType(dtype4, TYPE_NAME4)
        cmptype5 = f.createCompoundType(dtype5, TYPE_NAME5)
        v = f.createVariable(VAR_NAME,cmptype4, DIM_NAME)
        vv = g.createVariable(VAR_NAME,cmptype5, DIM_NAME)
        v[:] = data
        vv[:] = datag
        # try reading the data back before the file is closed
        dataout = v[:]
        dataoutg = vv[:]
        assert (cmptype4 == dtype4a) # data type should be aligned
        assert (dataout.dtype == dtype4a) # data type should be aligned
        assert(list(f.cmptypes.keys()) ==\
               [TYPE_NAME1,TYPE_NAME2,TYPE_NAME3,TYPE_NAME4,TYPE_NAME5])
        assert_array_equal(dataout['xxx']['xx']['i'],data['xxx']['xx']['i'])
        assert_array_equal(dataout['xxx']['xx']['j'],data['xxx']['xx']['j'])
        assert_array_almost_equal(dataout['xxx']['yy']['x'],data['xxx']['yy']['x'])
        assert_array_almost_equal(dataout['xxx']['yy']['y'],data['xxx']['yy']['y'])
        assert_array_almost_equal(dataout['yyy'],data['yyy'])
        assert_array_equal(dataoutg['x1']['i'],datag['x1']['i'])
        assert_array_equal(dataoutg['x1']['j'],datag['x1']['j'])
        assert_array_almost_equal(dataoutg['y1']['x'],datag['y1']['x'])
        assert_array_almost_equal(dataoutg['y1']['y'],datag['y1']['y'])
        f.close()
    def tearDown(self):
        # Remove the temporary files
        os.remove(self.file)
        #pass
    def runTest(self):
        """testing compound variables"""
        # Re-read the file written in setUp and repeat the checks after a
        # full close/reopen cycle.
        f = Dataset(self.file, 'r')
        v = f.variables[VAR_NAME]
        g = f.groups[GROUP_NAME]
        vv = g.variables[VAR_NAME]
        dataout = v[:]
        dataoutg = vv[:]
        # make sure data type is aligned
        assert (f.cmptypes['cmp4'] == dtype4a)
        assert(list(f.cmptypes.keys()) ==\
               [TYPE_NAME1,TYPE_NAME2,TYPE_NAME3,TYPE_NAME4,TYPE_NAME5])
        assert_array_equal(dataout['xxx']['xx']['i'],data['xxx']['xx']['i'])
        assert_array_equal(dataout['xxx']['xx']['j'],data['xxx']['xx']['j'])
        assert_array_almost_equal(dataout['xxx']['yy']['x'],data['xxx']['yy']['x'])
        assert_array_almost_equal(dataout['xxx']['yy']['y'],data['xxx']['yy']['y'])
        assert_array_almost_equal(dataout['yyy'],data['yyy'])
        assert_array_equal(dataoutg['x1']['i'],datag['x1']['i'])
        assert_array_equal(dataoutg['x1']['j'],datag['x1']['j'])
        assert_array_almost_equal(dataoutg['y1']['x'],datag['y1']['x'])
        assert_array_almost_equal(dataoutg['y1']['y'],datag['y1']['y'])
        f.close()
        # issue 773
        # Regression test: nested compound containing fixed-width strings,
        # written through the dtype_view layout.
        f = Dataset(self.file,'w')
        dtype = np.dtype([('observation', 'i4'),
                          ('station_name','S80')])
        dtype_nest = np.dtype([('observation', 'i4'),
                               ('station_name','S80'),
                               ('nested_observation',dtype)])
        station_data_t1 = f.createCompoundType(dtype,'station_data1')
        station_data_t2 = f.createCompoundType(dtype_nest,'station_data')
        f.createDimension('station',None)
        statdat = f.createVariable('station_obs', station_data_t2, ('station',))
        assert(statdat.dtype == station_data_t2.dtype)
        datain = np.empty(2,station_data_t2.dtype_view)
        datain['observation'][:] = (123,314)
        datain['station_name'][:] = ('Boulder','New York')
        datain['nested_observation']['observation'][:] = (-999,999)
        datain['nested_observation']['station_name'][:] = ('Boston','Chicago')
        statdat[:] = datain
        f.close()
        f = Dataset(self.file)
        dataout = f['station_obs'][:]
        assert(dataout.dtype == station_data_t2.dtype_view)
        assert_array_equal(datain, dataout)
        f.close()
if __name__ == '__main__':
    from netCDF4 import getlibversion
    # NOTE(review): `version` is computed but never used — possibly a leftover
    # from a version-dependent skip.
    version = getlibversion().split()[0]
    unittest.main()
| 2,989 |
787 | <filename>src/main/java/org/spongepowered/api/data/persistence/DataStore.java
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.data.persistence;
import io.leangen.geantyref.TypeToken;
import org.spongepowered.api.ResourceKey;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.data.DataHolder;
import org.spongepowered.api.data.DataManipulator;
import org.spongepowered.api.data.Key;
import org.spongepowered.api.data.value.Value;
import org.spongepowered.api.util.ResettableBuilder;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Function;
public interface DataStore {
/**
* Gets the supported {@link DataHolder} types.
*
* <p>Every returned {@link java.lang.reflect.Type} will be a subtype
* of {@link DataHolder}.</p>
*
* @return The supported dataHolder type.
*/
Collection<Type> supportedTypes();
/**
* Serializes the values of the {@link DataManipulator}
* into the {@link DataView}.
*
* @param dataManipulator The data manipulator
* @param view The data view to serialize to
* @return The view, for chaining
*/
DataView serialize(DataManipulator dataManipulator, DataView view);
/**
* Serializes the passed in {@link Value values} to the {@link DataView view}.
*
* @param values The values to serialize
* @param view The view
* @return The view, for chaining
*/
default DataView serialize(Iterable<Value<?>> values, DataView view) {
return this.serialize(DataManipulator.immutableOf(values), view);
}
/**
* Serializes the {@link Value}s.
*
* @param values The value container
* @return This view, for chaining
*/
default DataView serialize(Iterable<Value<?>> values) {
return this.serialize(DataManipulator.immutableOf(values));
}
/**
* Serializes the values of the {@link DataManipulator}.
*
* @param dataManipulator The data manipulator
* @return This view, for chaining
*/
default DataView serialize(DataManipulator dataManipulator) {
final DataView dataView = DataContainer.createNew();
this.serialize(dataManipulator, dataView);
return dataView;
}
/**
* Deserializes the data from the {@link DataView} and puts
* it in the {@link org.spongepowered.api.data.DataManipulator.Mutable}.
*
* @param dataManipulator The mutable data manipulator
* @param view The data view to deserialize
*/
void deserialize(DataManipulator.Mutable dataManipulator, DataView view);
/**
* Deserializes the {@link DataView} as a {@link org.spongepowered.api.data.DataManipulator.Mutable}.
*
* @param view The data view to deserialize
* @return The value store
*/
default DataManipulator.Mutable deserialize(DataView view) {
final DataManipulator.Mutable dataManipulator = DataManipulator.mutableOf();
this.deserialize(dataManipulator, view);
return dataManipulator;
}
/**
* Provides a {@link DataStore} for a single {@link Key}.
* <p>
* Note that default deserializers do not support {@link Collection}, {@link Map} or Array types!
* Use {@link Builder.SerializersStep#key(Key, BiConsumer, Function)} for these.
* </p>
*
* @param key The data key
* @param dataQuery The dataQuery to serialize this key under
* @param typeTokens The dataHolder types
*
* @return The new data store
*/
@SafeVarargs
@SuppressWarnings("unchecked")
static <T, V extends Value<T>> DataStore of(final Key<V> key, final DataQuery dataQuery, final TypeToken<? extends DataHolder> typeToken, final TypeToken<? extends DataHolder>... typeTokens) {
return DataStore.builder().pluginData(key.key()).holder(typeToken).holder(typeTokens).key(key, dataQuery).build();
}
/**
* Provides a {@link DataStore} for a single {@link Key}.
* <p>
* Note that default deserializers do not support {@link Collection}, {@link Map} or Array types!
* Use {@link Builder.SerializersStep#key(Key, BiConsumer, Function)} for these.
* </p>
*
* @param key The data key
* @param dataQuery The dataQuery to serialize this key under
* @param types The dataHolder types
*
* @return The new data store
*/
@SafeVarargs
@SuppressWarnings("unchecked")
static <T, V extends Value<T>> DataStore of(final Key<V> key, final DataQuery dataQuery, final Class<?extends DataHolder> type, final Class<? extends DataHolder>... types) {
return DataStore.builder().pluginData(key.key()).holder(type).holder(types).key(key, dataQuery).build();
}
/**
* Returns the {@link DataStore} builder.
*
* @return The dataStore builder.
*/
static DataStore.Builder builder() {
return Sponge.game().builderProvider().provide(Builder.class);
}
interface Builder extends ResettableBuilder<DataStore, Builder> {

    /**
     * Starts building a DataStore for plugin data.
     * <p>Serializers and Deserializers will operate on their own {@link DataView}.</p>
     *
     * @param key the key under which all data from this DataStore is registered
     *
     * @return this builder for chaining
     */
    HolderStep pluginData(ResourceKey key);

    /**
     * Starts building a DataStore for plugin data.
     * <p>Serializers and Deserializers will operate on their own {@link DataView}.</p>
     *
     * @param key the key under which all data from this DataStore is registered
     * @param version the content-version of your data.
     *
     * @return this builder for chaining
     */
    UpdaterStep pluginData(ResourceKey key, int version);

    /**
     * Starts building a DataStore for raw data.
     * <p>Serializers and deserializers will operate on the root {@link DataView}
     * which includes all data from vanilla minecraft and more</p>
     * <p>Consider using {@link #pluginData} instead.</p>
     *
     * @return this builder for chaining
     */
    HolderStep vanillaData();

    interface UpdaterStep extends ResettableBuilder<DataStore, Builder> {

        /**
         * Adds one or more content updaters used to migrate older
         * content-versions of the data to the current one.
         *
         * @param updater the content updaters
         *
         * @return this builder for chaining
         */
        HolderStep updater(DataContentUpdater... updater);
    }

    interface HolderStep extends ResettableBuilder<DataStore, Builder> {

        /**
         * Adds one or more allowed dataHolder types
         *
         * @param typeTokens the dataHolder types
         *
         * @return this builder for chaining
         */
        @SuppressWarnings("unchecked")
        SerializersStep holder(TypeToken<? extends DataHolder>... typeTokens);

        /**
         * Adds one or more allowed dataHolder types
         *
         * <p>These must not be parameterized types.</p>
         *
         * @param types the dataHolder types
         *
         * @return this builder for chaining
         */
        @SuppressWarnings("unchecked")
        SerializersStep holder(Class<? extends DataHolder>... types);
    }

    interface SerializersStep extends HolderStep, ResettableBuilder<DataStore, Builder> {

        /**
         * Adds one or more keys using the default implemented serializers for the given key.
         * <p>The {@link Key#key() resource-key} value will be used as DataQuery</p>
         *
         * @param key The data key
         * @param moreKeys more data keys
         *
         * @return this builder for chaining
         */
        @SuppressWarnings("unchecked")
        Builder.EndStep keys(final Key<?> key, final Key<?>... moreKeys);

        /**
         * Adds the default implemented serializers for the given key.
         *
         * @param key The data key
         * @param dataQueries The path elements of the dataQuery to serialize this key under
         *
         * @return this builder for chaining
         */
        default <T, V extends Value<T>> Builder.EndStep key(final Key<V> key, final String... dataQueries) {
            if (dataQueries.length == 0) {
                throw new IllegalArgumentException("dataQueries cannot be empty");
            }
            return this.key(key, DataQuery.of(dataQueries));
        }

        /**
         * Adds the default implemented serializers for the given key.
         *
         * @param key The data key
         * @param dataQuery The dataQuery to serialize this key under
         *
         * @return this builder for chaining
         */
        <T, V extends Value<T>> Builder.EndStep key(final Key<V> key, final DataQuery dataQuery);

        /**
         * Adds the serializers for the given key.
         *
         * @param key The data key
         * @param serializer the data serializer
         * @param deserializer the data deserializer
         *
         * @return this builder for chaining
         */
        <T, V extends Value<T>> Builder.EndStep key(Key<V> key, BiConsumer<DataView, T> serializer, Function<DataView, Optional<T>> deserializer);
    }

    interface EndStep extends SerializersStep, ResettableBuilder<DataStore, Builder> {

        /**
         * Builds a dataStore for given dataHolder type.
         *
         * @return The new data store
         */
        DataStore build();
    }
}
}
| 4,419 |
1,089 | package com.cengalabs.flatui.sample;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.widget.LinearLayout;
import com.cengalabs.flatui.TouchEffectAnimator;
/**
* User: eluleci
* Date: 25.09.2014
* Time: 17:23
*/
/**
 * A {@link LinearLayout} that plays a material-style ripple/ease touch effect,
 * delegating all animation work to {@link TouchEffectAnimator}.
 */
public class RippleLinearLayout extends LinearLayout {

    private TouchEffectAnimator effectAnimator;

    public RippleLinearLayout(Context context) {
        super(context);
        init();
    }

    public RippleLinearLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    private void init() {
        // The view needs a background for the effect to be visible; in this sample
        // the layout gets a transparent background from its XML.
        final TouchEffectAnimator animator = new TouchEffectAnimator(this);
        animator.setHasRippleEffect(true);      // without this only the ease effect plays
        animator.setEffectColor(Color.LTGRAY);  // effect color
        animator.setAnimDuration(1000);         // animation duration
        animator.setClipRadius(20);             // clip radius, for rounded backgrounds
        effectAnimator = animator;

        // An OnClickListener must be registered so this view receives UP touch
        // events; TouchEffectAnimator intentionally returns nothing from
        // onTouchEvent, leaving listener wiring to the developer.
        setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
            }
        });
    }

    @Override
    public boolean onTouchEvent(final MotionEvent event) {
        // Feed every touch event to the animator before default handling.
        effectAnimator.onTouchEvent(event);
        return super.onTouchEvent(event);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Let the animator paint the effect onto this view's canvas.
        effectAnimator.onDraw(canvas);
        super.onDraw(canvas);
    }
}
| 798 |
582 | <reponame>OLibutzki/moduliths
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moduliths.model;
import lombok.AccessLevel;
import lombok.RequiredArgsConstructor;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.moduliths.Module;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
/**
 * Abstraction for low-level module information. Used to support different annotations to configure metadata about a
 * module.
 *
 * @author <NAME>
 */
interface ModuleInformation {

    /**
     * Creates a {@link ModuleInformation} for the given package, preferring the
     * jMolecules {@code @Module} annotation when it is on the classpath and
     * present on the package, and falling back to Moduliths' own {@link Module}.
     *
     * @param javaPackage the package to inspect
     * @return module metadata backed by the detected annotation
     */
    public static ModuleInformation of(JavaPackage javaPackage) {

        if (ClassUtils.isPresent("org.jmolecules.ddd.annotation.Module", ModuleInformation.class.getClassLoader())
                && MoleculesModule.supports(javaPackage)) {
            return new MoleculesModule(javaPackage);
        }

        return new ModulithsModule(javaPackage);
    }

    /** Human-readable display name of the module. */
    String getDisplayName();

    /** Names of modules this module explicitly allows itself to depend on. */
    List<String> getAllowedDependencies();

    /** Base implementation deriving the display name from the package name. */
    @RequiredArgsConstructor(access = AccessLevel.PROTECTED)
    static abstract class AbstractModuleInformation implements ModuleInformation {

        private final JavaPackage javaPackage;

        /*
         * (non-Javadoc)
         * @see org.moduliths.model.ModuleInformation#getName()
         */
        @Override
        public String getDisplayName() {
            return javaPackage.getName();
        }
    }

    /** {@link ModuleInformation} backed by jMolecules' {@code @Module} annotation. */
    static class MoleculesModule extends AbstractModuleInformation {

        private final Optional<org.jmolecules.ddd.annotation.Module> annotation;

        public static boolean supports(JavaPackage javaPackage) {
            return javaPackage.getAnnotation(org.jmolecules.ddd.annotation.Module.class).isPresent();
        }

        public MoleculesModule(JavaPackage javaPackage) {

            super(javaPackage);

            this.annotation = javaPackage.getAnnotation(org.jmolecules.ddd.annotation.Module.class);
        }

        /*
         * (non-Javadoc)
         * @see org.moduliths.model.ModuleInformation#getName()
         */
        @Override
        public String getDisplayName() {

            // Prefer the explicit name attribute, then the value alias, then the package name.
            return annotation //
                    .map(org.jmolecules.ddd.annotation.Module::name) //
                    .filter(StringUtils::hasText)
                    .orElseGet(() -> annotation //
                            .map(org.jmolecules.ddd.annotation.Module::value) //
                            .filter(StringUtils::hasText) //
                            .orElseGet(super::getDisplayName));
        }

        /*
         * (non-Javadoc)
         * @see org.moduliths.model.ModuleInformation#getAllowedDependencies()
         */
        @Override
        public List<String> getAllowedDependencies() {
            // jMolecules' annotation has no dependency declaration facility.
            return Collections.emptyList();
        }
    }

    /** {@link ModuleInformation} backed by Moduliths' own {@link Module} annotation. */
    static class ModulithsModule extends AbstractModuleInformation {

        private final Optional<Module> annotation;

        public static boolean supports(JavaPackage javaPackage) {
            return javaPackage.getAnnotation(Module.class).isPresent();
        }

        public ModulithsModule(JavaPackage javaPackage) {

            super(javaPackage);

            this.annotation = javaPackage.getAnnotation(Module.class);
        }

        /*
         * (non-Javadoc)
         * @see org.moduliths.model.ModuleInformation.AbstractModuleInformation#getName()
         */
        @Override
        public String getDisplayName() {

            return annotation //
                    .map(Module::displayName) //
                    .filter(StringUtils::hasText) //
                    .orElseGet(super::getDisplayName);
        }

        /*
         * (non-Javadoc)
         * @see org.moduliths.model.ModuleInformation#getAllowedDependencies()
         */
        @Override
        public List<String> getAllowedDependencies() {

            return annotation //
                    .map(it -> Arrays.stream(it.allowedDependencies())) //
                    .orElse(Stream.empty()) //
                    .collect(Collectors.toList());
        }
    }
}
| 1,356 |
4,403 | package cn.hutool.db;
import cn.hutool.db.pojo.User;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for {@link Entity}: bean parsing and conversion back to beans.
 *
 * @author looly
 */
public class EntityTest {

    @Test
    public void parseTest() {
        final User bean = new User();
        bean.setId(1);
        bean.setName("test");

        // Parsing with an explicit table name.
        final Entity record = Entity.create("testTable").parseBean(bean);
        Assert.assertEquals(Integer.valueOf(1), record.getInt("id"));
        Assert.assertEquals("test", record.getStr("name"));
    }

    @Test
    public void parseTest2() {
        final User bean = new User();
        bean.setId(1);
        bean.setName("test");

        // Without a table name the bean's class name is used.
        final Entity record = Entity.create().parseBean(bean);
        Assert.assertEquals(Integer.valueOf(1), record.getInt("id"));
        Assert.assertEquals("test", record.getStr("name"));
        Assert.assertEquals("user", record.getTableName());
    }

    @Test
    public void parseTest3() {
        final User bean = new User();
        bean.setName("test");

        // Null fields are skipped when ignoreNullValue is requested.
        final Entity record = Entity.create().parseBean(bean, false, true);
        Assert.assertFalse(record.containsKey("id"));
        Assert.assertEquals("test", record.getStr("name"));
        Assert.assertEquals("user", record.getTableName());
    }

    @Test
    public void entityToBeanIgnoreCaseTest() {
        // Keys differing only in case still populate the bean.
        final Entity record = Entity.create().set("ID", 2).set("NAME", "testName");
        final User bean = record.toBeanIgnoreCase(User.class);
        Assert.assertEquals(Integer.valueOf(2), bean.getId());
        Assert.assertEquals("testName", bean.getName());
    }
}
| 525 |
3,442 | <filename>src/net/java/sip/communicator/service/callhistory/event/CallRecordEvent.java
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.service.callhistory.event;
import java.util.*;
import net.java.sip.communicator.service.callhistory.*;
/**
 * The <tt>CallRecordEvent</tt> indicates that a <tt>CallRecord</tt> has been
 * received as a result of a <tt>CallHistoryQuery</tt>.
 *
 * @author <NAME>
 */
public class CallRecordEvent
    extends EventObject
{
    /**
     * Serial version UID.
     */
    private static final long serialVersionUID = 0L;

    /**
     * The <tt>CallRecord</tt> this event is about.
     */
    private final CallRecord callRecord;

    /**
     * Creates a <tt>CallRecordEvent</tt> by specifying the parent <tt>query</tt>
     * and the <tt>callRecord</tt> this event is about.
     * @param query the source that triggered this event
     * @param callRecord the <tt>CallRecord</tt> this event is about
     */
    public CallRecordEvent(CallHistoryQuery query,
                           CallRecord callRecord)
    {
        super(query);

        this.callRecord = callRecord;
    }

    /**
     * Returns the <tt>CallHistoryQuery</tt> that triggered this event.
     * @return the <tt>CallHistoryQuery</tt> that triggered this event
     */
    public CallHistoryQuery getQuerySource()
    {
        return (CallHistoryQuery) source;
    }

    /**
     * Returns the <tt>CallRecord</tt> this event is about.
     * @return the <tt>CallRecord</tt> this event is about
     */
    public CallRecord getCallRecord()
    {
        return callRecord;
    }
}
| 763 |
/* Weak definition: another translation unit may provide a strong A that overrides this one. */
int __attribute__((weak)) A[] = { 1, 2, 3, 4 };
/* Returns the (possibly overridden) definition of A. */
int* getA() { return A; }
| 32 |
690 | <reponame>Mr00Anderson/artemis-odb
package com.artemis;
import com.artemis.utils.Bag;
import com.artemis.utils.ImmutableBag;
import com.artemis.utils.BitVector;
/** Delegate for system invocation.
 *
 * Maybe you want to more granular control over system invocations, feed certain systems different deltas,
 * or completely rewrite processing in favor of events. Extending this class allows you to write your own
 * logic for processing system invocation.
 *
 * Register it with {@link WorldConfigurationBuilder#register(SystemInvocationStrategy)}
 *
 * Be sure to call {@link #updateEntityStates()} after the world dies.
 *
 * @see InvocationStrategy for the default strategy.
 */
public abstract class SystemInvocationStrategy {

    /** World to operate on. */
    protected World world;

    /** Bit per registered system; a set bit marks the system at that index as disabled. */
    protected final BitVector disabled = new BitVector();

    /** All systems registered with the world, in processing order. */
    protected Bag<BaseSystem> systems;

    /** Sets the world to operate on. */
    protected final void setWorld(World world) {
        this.world = world;
    }

    /**
     * Called prior to {@link #initialize()}
     */
    protected void setSystems(Bag<BaseSystem> systems) {
        this.systems = systems;
    }

    /** Called during world initialization phase. */
    protected void initialize() {}

    /** Call to inform all systems and subscription of world state changes. */
    protected final void updateEntityStates() {
        world.batchProcessor.update();
    }

    /**
     * Process all systems.
     *
     * @deprecated superseded by {@link #process()}
     */
    @Deprecated
    protected final void process(Bag<BaseSystem> systems) {
        throw new UnsupportedOperationException(
            "process(Bag) is deprecated; implement/invoke process() instead.");
    }

    /** Process all systems. */
    protected abstract void process();

    /**
     * Checks whether the registered system of the same class as {@code system}
     * is currently enabled.
     *
     * @param system system whose class identifies the registered system to query
     * @return true if the matching system is enabled
     * @throws IllegalArgumentException if no system of that class is registered
     */
    public boolean isEnabled(BaseSystem system) {
        Class<? extends BaseSystem> target = system.getClass();
        ImmutableBag<BaseSystem> systems = world.getSystems();
        for (int i = 0; i < systems.size(); i++) {
            if (target == systems.get(i).getClass())
                return !disabled.get(i);
        }

        throw new IllegalArgumentException(
            "System not registered with world: " + target.getName());
    }

    /**
     * Enables or disables the registered system of the same class as
     * {@code system}. Does nothing if no such system is registered.
     *
     * @param system system whose class identifies the registered system
     * @param value true to enable, false to disable
     */
    public void setEnabled(BaseSystem system, boolean value) {
        Class<? extends BaseSystem> target = system.getClass();
        ImmutableBag<BaseSystem> systems = world.getSystems();
        for (int i = 0; i < systems.size(); i++) {
            if (target == systems.get(i).getClass())
                disabled.set(i, !value);
        }
    }
}
| 672 |
404 | """scrapli telnet/ssh/netconf client library"""
from scrapli.driver.base import AsyncDriver, Driver
from scrapli.factory import AsyncScrapli, Scrapli
__all__ = (
"AsyncDriver",
"Driver",
"AsyncScrapli",
"Scrapli",
)
| 96 |
364 | package ca.uhn.fhir.jpa.provider.r5;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.provider.r5.BaseResourceProviderR5Test;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.client.interceptor.SimpleRequestHeaderInterceptor;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.interceptor.auth.AuthorizationInterceptor;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRule;
import ca.uhn.fhir.rest.server.interceptor.auth.IAuthRuleTester;
import ca.uhn.fhir.rest.server.interceptor.auth.PolicyEnum;
import ca.uhn.fhir.rest.server.interceptor.auth.RuleBuilder;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r5.model.Bundle;
import org.hl7.fhir.r5.model.CodeableConcept;
import org.hl7.fhir.r5.model.Coding;
import org.hl7.fhir.r5.model.Condition;
import org.hl7.fhir.r5.model.Encounter;
import org.hl7.fhir.r5.model.Enumerations;
import org.hl7.fhir.r5.model.IdType;
import org.hl7.fhir.r5.model.Identifier;
import org.hl7.fhir.r5.model.Observation;
import org.hl7.fhir.r5.model.Organization;
import org.hl7.fhir.r5.model.Parameters;
import org.hl7.fhir.r5.model.Patient;
import org.hl7.fhir.r5.model.Practitioner;
import org.hl7.fhir.r5.model.Reference;
import org.hl7.fhir.r5.model.StringType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
public class AuthorizationInterceptorJpaR5Test extends BaseResourceProviderR5Test {

    private static final Logger ourLog = LoggerFactory.getLogger(AuthorizationInterceptorJpaR5Test.class);

    @BeforeEach
    @Override
    public void before() throws Exception {
        super.before();
        // Enable the delete-related DAO features the tests below exercise.
        myDaoConfig.setAllowMultipleDelete(true);
        myDaoConfig.setExpungeEnabled(true);
        myDaoConfig.setDeleteExpungeEnabled(true);
    }

    /**
     * See #503
     */
    @Test
    public void testDeleteIsAllowedForCompartment() {
        // One observation inside the patient compartment, one outside it.
        Patient patient = new Patient();
        patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
        patient.addName().setFamily("Tester").addGiven("Raghad");
        final IIdType id = myClient.create().resource(patient).execute().getId();

        Observation obsInCompartment = new Observation();
        obsInCompartment.setStatus(Enumerations.ObservationStatus.FINAL);
        obsInCompartment.getSubject().setReferenceElement(id.toUnqualifiedVersionless());
        IIdType obsInCompartmentId = myClient.create().resource(obsInCompartment).execute().getId().toUnqualifiedVersionless();

        Observation obsNotInCompartment = new Observation();
        obsNotInCompartment.setStatus(Enumerations.ObservationStatus.FINAL);
        IIdType obsNotInCompartmentId = myClient.create().resource(obsNotInCompartment).execute().getId().toUnqualifiedVersionless();

        // Rules: allow observation deletes only within the patient's compartment.
        ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
            @Override
            public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
                return new RuleBuilder()
                    .allow().delete().resourcesOfType(Observation.class).inCompartment("Patient", id).andThen()
                    .deny().delete().allResources().withAnyId().andThen()
                    .allowAll()
                    .build();
            }
        });

        // In-compartment delete succeeds; out-of-compartment delete is forbidden.
        myClient.delete().resourceById(obsInCompartmentId.toUnqualifiedVersionless()).execute();

        try {
            myClient.delete().resourceById(obsNotInCompartmentId.toUnqualifiedVersionless()).execute();
            fail();
        } catch (ForbiddenOperationException e) {
            // good
        }
    }

    @Test
    public void testDeleteIsAllowedForCompartmentUsingTransaction() {
        // Same setup as above, but deletes are issued via transaction bundles.
        Patient patient = new Patient();
        patient.addIdentifier().setSystem("http://uhn.ca/mrns").setValue("100");
        patient.addName().setFamily("Tester").addGiven("Raghad");
        final IIdType id = myClient.create().resource(patient).execute().getId();

        Observation obsInCompartment = new Observation();
        obsInCompartment.setStatus(Enumerations.ObservationStatus.FINAL);
        obsInCompartment.getSubject().setReferenceElement(id.toUnqualifiedVersionless());
        IIdType obsInCompartmentId = myClient.create().resource(obsInCompartment).execute().getId().toUnqualifiedVersionless();

        Observation obsNotInCompartment = new Observation();
        obsNotInCompartment.setStatus(Enumerations.ObservationStatus.FINAL);
        IIdType obsNotInCompartmentId = myClient.create().resource(obsNotInCompartment).execute().getId().toUnqualifiedVersionless();

        // Rules: compartment-scoped deletes allowed, transactions apply normal rules, everything else denied.
        ourRestServer.registerInterceptor(new AuthorizationInterceptor(PolicyEnum.DENY) {
            @Override
            public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
                return new RuleBuilder()
                    .allow().delete().resourcesOfType(Observation.class).inCompartment("Patient", id).andThen()
                    .allow().transaction().withAnyOperation().andApplyNormalRules().andThen()
                    .denyAll()
                    .build();
            }
        });

        Bundle bundle;

        // Transaction deleting the in-compartment observation succeeds.
        bundle = new Bundle();
        bundle.setType(Bundle.BundleType.TRANSACTION);
        bundle.addEntry().getRequest().setMethod(Bundle.HTTPVerb.DELETE).setUrl(obsInCompartmentId.toUnqualifiedVersionless().getValue());
        myClient.transaction().withBundle(bundle).execute();

        // Transaction deleting the out-of-compartment observation is forbidden.
        try {
            bundle = new Bundle();
            bundle.setType(Bundle.BundleType.TRANSACTION);
            bundle.addEntry().getRequest().setMethod(Bundle.HTTPVerb.DELETE).setUrl(obsNotInCompartmentId.toUnqualifiedVersionless().getValue());
            myClient.transaction().withBundle(bundle).execute();
            fail();
        } catch (ForbiddenOperationException e) {
            // good
        }
    }
}
| 2,358 |
459 | <reponame>jose-villegas/VCT_Engine
/*
Copyright 2005-2015 Intel Corporation. All Rights Reserved.
This file is part of Threading Building Blocks. Threading Building Blocks is free software;
you can redistribute it and/or modify it under the terms of the GNU General Public License
version 2 as published by the Free Software Foundation. Threading Building Blocks is
distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details. You should have received a copy of
the GNU General Public License along with Threading Building Blocks; if not, write to the
Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
As a special exception, you may use this file as part of a free software library without
restriction. Specifically, if other files instantiate templates or use macros or inline
functions from this file, or you compile this file and link it with other files to produce
an executable, this file does not by itself cause the resulting executable to be covered
by the GNU General Public License. This exception does not however invalidate any other
reasons why the executable file might be covered by the GNU General Public License.
*/
#include "tbb/task.h"
#include "harness.h"
#include "tbb/task_scheduler_init.h"
using tbb::task;
/* Worker thread stack size passed to task_scheduler_init; larger on IPF (Itanium). */
#if __TBB_ipf
const unsigned StackSize = 1024*1024*6;
#else /* */
const unsigned StackSize = 1024*1024*3;
#endif

// GCC and ICC on Linux store TLS data in the stack space. This test makes sure
// that the stealing limiting heuristic used by the task scheduler does not
// switch off stealing when a large amount of TLS data is reserved.
#if _MSC_VER
__declspec(thread)
#elif __linux__ || ((__MINGW32__ || __MINGW64__) && __TBB_GCC_VERSION >= 40500)
__thread
#endif
char map2[1024*1024*2];
// Task whose only job is to record, via the 'completed' flag, that some worker
// thread has executed it.
class TestTask : public task {
public:
    static volatile int completed;
    task* execute() {
        completed = 1;
        return NULL;
    };
};

volatile int TestTask::completed = 0;
// Spawns a task from the main thread and waits for the second worker to steal
// and execute it; fails if stealing never happens within ~6 seconds.
void TestStealingIsEnabled () {
    tbb::task_scheduler_init init(2, StackSize);

    task &r = *new( task::allocate_root() ) tbb::empty_task;
    task &t = *new( r.allocate_child() ) TestTask;
    r.set_ref_count(2);
    r.spawn(t);
    // Poll once per second for up to 6 seconds for the stolen task to complete.
    int count = 0;
    while ( !TestTask::completed && ++count < 6 )
        Harness::Sleep(1000);
    ASSERT( TestTask::completed, "Stealing is disabled or the machine is heavily oversubscribed" );
    r.wait_for_all();
    task::destroy(r);
}
// Harness entry point; skips when __thread support or a second hardware thread
// is unavailable, since the test depends on both.
int TestMain () {
#if !__TBB_THREAD_LOCAL_VARIABLES_PRESENT
    REPORT( "Known issue: Test skipped because no compiler support for __thread keyword.\n" );
    return Harness::Skipped;
#endif
    if ( tbb::task_scheduler_init::default_num_threads() == 1 ) {
        REPORT( "Known issue: Test requires at least 2 hardware threads.\n" );
        return Harness::Skipped;
    }
    TestStealingIsEnabled();
    return Harness::Done;
}
| 1,011 |
1,178 | from sciapp.action import FreeLineROI as Plugin | 13 |
1,144 | <gh_stars>1000+
// SPDX-License-Identifier: GPL-2.0+
/*
* (C) Copyright 2000-2009
* <NAME>, DENX Software Engineering, <EMAIL>.
*/
#include <common.h>
#include <mapmem.h>
#include <linux/sizes.h>
DECLARE_GLOBAL_DATA_PTR;
#define LINUX_ARM64_IMAGE_MAGIC 0x644d5241
/* See Documentation/arm64/booting.txt in the Linux kernel.
 * Multi-byte fields marked LE are stored little-endian in the image. */
struct Image_header {
    uint32_t code0;        /* Executable code */
    uint32_t code1;        /* Executable code */
    uint64_t text_offset;  /* Image load offset, LE */
    uint64_t image_size;   /* Effective Image size, LE */
    uint64_t flags;        /* Kernel flags, LE */
    uint64_t res2;         /* reserved */
    uint64_t res3;         /* reserved */
    uint64_t res4;         /* reserved */
    uint32_t magic;        /* Magic number */
    uint32_t res5;
};
/*
 * booti_setup() - validate an arm64 Linux Image and compute its load address
 *
 * @image: address the Image is currently loaded at
 * @relocated_addr: out: address the kernel should be relocated to/run from
 * @size: out: effective image size
 * @force_reloc: force relocation to the base of RAM even if the kernel
 *               advertises position independence (flags bit 3)
 *
 * Return: 0 on success, 1 if the Image magic is invalid
 */
int booti_setup(ulong image, ulong *relocated_addr, ulong *size,
                bool force_reloc)
{
    struct Image_header *ih;
    uint64_t dst;
    uint64_t image_size, text_offset;

    *relocated_addr = image;

    ih = (struct Image_header *)map_sysmem(image, 0);

    if (ih->magic != le32_to_cpu(LINUX_ARM64_IMAGE_MAGIC)) {
        puts("Bad Linux ARM64 Image magic!\n");
        return 1;
    }

    /*
     * Prior to Linux commit a2c1d73b94ed, the text_offset field
     * is of unknown endianness. In these cases, the image_size
     * field is zero, and we can assume a fixed value of 0x80000.
     */
    if (ih->image_size == 0) {
        puts("Image lacks image_size field, assuming 16MiB\n");
        image_size = 16 << 20;
        text_offset = 0x80000;
    } else {
        image_size = le64_to_cpu(ih->image_size);
        text_offset = le64_to_cpu(ih->text_offset);
    }

    *size = image_size;

    /*
     * If bit 3 of the flags field is set, the 2MB aligned base of the
     * kernel image can be anywhere in physical memory, so respect
     * images->ep. Otherwise, relocate the image to the base of RAM
     * since memory below it is not accessible via the linear mapping.
     */
    if (!force_reloc && (le64_to_cpu(ih->flags) & BIT(3)))
        dst = image - text_offset;
    else
        dst = gd->bd->bi_dram[0].start;

    /* Kernel must start at a 2 MiB boundary plus its text_offset. */
    *relocated_addr = ALIGN(dst, SZ_2M) + text_offset;

    unmap_sysmem(ih);

    return 0;
}
| 806 |
375 | package hudson.plugins.git.extensions.impl;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Set;
import hudson.model.Result;
import hudson.model.FreeStyleProject;
import hudson.plugins.git.TestGitRepo;
import hudson.plugins.git.extensions.GitSCMExtensionTest;
import hudson.plugins.git.extensions.GitSCMExtension;
import org.jenkinsci.plugins.gitclient.Git;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.junit.Test;
/**
 * Verifies that {@link CloneOption} with "don't fetch tags" enabled never
 * brings remote tags into the project workspace.
 *
 * @author <NAME>
 */
public class CloneOptionNoTagsTest extends GitSCMExtensionTest {

    FreeStyleProject project;
    TestGitRepo repo;

    @Override
    public void before() throws Exception {
        repo = new TestGitRepo("repo", tmp.newFolder(), listener);
        project = setupBasicProject(repo);
    }

    @Override
    protected GitSCMExtension getExtension() {
        // Shallow clone with tag fetching disabled, no reference repo, no timeout.
        final boolean shallowClone = true;
        final boolean dontFetchTags = true;
        final String noReference = null;
        final Integer noTimeout = null;
        return new CloneOption(shallowClone, dontFetchTags, noReference, noTimeout);
    }

    @Test
    public void cloningShouldNotFetchTags() throws Exception {
        // The initial clone must not bring the pre-existing tag along.
        repo.commit("repo-init", repo.johnDoe, "repo0 initial commit");
        repo.tag("v0.0.1", "a tag that should never be fetched");

        assertTrue("scm polling should detect a change after initial commit", project.poll(listener).hasChanges());
        build(project, Result.SUCCESS);

        assertTrue("there should no tags have been cloned from remote", allTagsInProjectWorkspace().isEmpty());
    }

    @Test
    public void detectNoChangeAfterCreatingATag() throws Exception {
        // A tag created after the first build must neither trigger polling nor be fetched.
        repo.commit("repo-init", repo.johnDoe, "repo0 initial commit");

        assertTrue("scm polling should detect a change after initial commit", project.poll(listener).hasChanges());
        build(project, Result.SUCCESS);

        repo.tag("v0.0.1", "a tag that should never be fetched");
        assertFalse("scm polling should not detect a change after creating a tag", project.poll(listener).hasChanges());
        build(project, Result.SUCCESS);

        assertTrue("there should no tags have been fetched from remote", allTagsInProjectWorkspace().isEmpty());
    }

    /** Lists all git tags present in the project's workspace clone. */
    private Set<String> allTagsInProjectWorkspace() throws IOException, InterruptedException {
        GitClient git = Git.with(listener, null).in(project.getSomeWorkspace()).getClient();
        return git.getTagNames("*");
    }
}
| 888 |
375 | <filename>src/Eclipse-IDE/org.robotframework.ide.eclipse.main.plugin.tests/src/org/robotframework/ide/eclipse/main/plugin/mockmodel/NamedElement.java
/*
* Copyright 2015 Nokia Solutions and Networks
* Licensed under the Apache License, Version 2.0,
* see license.txt file for details.
*/
package org.robotframework.ide.eclipse.main.plugin.mockmodel;
import static com.google.common.collect.Lists.newArrayList;
import java.util.List;
import org.eclipse.jface.resource.ImageDescriptor;
import org.robotframework.ide.eclipse.main.plugin.model.RobotElement;
/**
 * Minimal {@link RobotElement} used in tests: carries only a name and an
 * optional parent, has no children and no image.
 */
public class NamedElement implements RobotElement {

    private final RobotElement parent;
    private final String name;

    public NamedElement(final String name) {
        this(null, name);
    }

    public NamedElement(final RobotElement parent, final String name) {
        this.parent = parent;
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public RobotElement getParent() {
        return parent;
    }

    @Override
    public List<? extends RobotElement> getChildren() {
        // Always childless.
        return newArrayList();
    }

    @Override
    public int getIndex() {
        // Position among the parent's children, or -1 for a root element.
        if (parent == null) {
            return -1;
        }
        return parent.getChildren().indexOf(this);
    }

    @Override
    public ImageDescriptor getImage() {
        return null;
    }
}
| 511 |
401 | <reponame>Zweihui/Seraphim
package com.zwh.mvparms.eyepetizer.mvp.contract;
import com.jess.arms.mvp.IView;
import com.jess.arms.mvp.IModel;
import com.zwh.mvparms.eyepetizer.mvp.model.entity.AuthorAlbumInfo;
import com.zwh.mvparms.eyepetizer.mvp.model.entity.AuthorDynamicInfo;
import com.zwh.mvparms.eyepetizer.mvp.model.entity.ReplyInfo;
import com.zwh.mvparms.eyepetizer.mvp.model.entity.ShareInfo;
import com.zwh.mvparms.eyepetizer.mvp.model.entity.VideoListInfo;
import io.reactivex.Observable;
import retrofit2.http.Query;
public interface VideoDetailContract {

    // Frequently used UI methods (e.g. showing/hiding a progress bar or text
    // messages) can be defined in BaseView.
    interface View extends IView {
        void setData(VideoListInfo info, boolean isShowSecond);
        void setReplyData(ReplyInfo info, boolean isLoadmore);
        void setShareData(ShareInfo info);
        void setVideoData(VideoListInfo.Video.VideoData data);
    }

    // The Model layer defines this interface; callers only depend on the data
    // the model returns, not on internals such as whether a cache is used.
    interface Model extends IModel {
        Observable<VideoListInfo> getRelateVideoInfo(int id);
        Observable<VideoListInfo> getSecondRelateVideoInfo(String path, int id, int startCount);
        Observable<ReplyInfo> getAllReplyInfo(int videoId);
        Observable<ReplyInfo> getMoreReplyInfo(int lastId, int videoId);
        Observable<ShareInfo> getShareInfo(int identity);
        Observable<VideoListInfo.Video.VideoData> getVideoData(int id);
    }
}
1,900 | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.management;
import org.ehcache.spi.service.Service;
import org.terracotta.management.model.capabilities.Capability;
import org.terracotta.management.model.context.Context;
import org.terracotta.management.model.context.ContextContainer;
import org.terracotta.management.registry.CapabilityManagementSupport;
import java.util.Collection;
import java.util.Map;
/**
* Special version of {@link ManagementRegistryService} which can be used across several {@link org.ehcache.CacheManager}.
* <p>
* This can be helpful in the case you want to access from one service all statistics, capabilities, etc of several cache managers.
*/
public interface SharedManagementService extends CapabilityManagementSupport, Service {
/**
* Get the management contexts required to make use of the
* registered objects' capabilities.
*
* @return a collection of contexts.
*/
Map<Context, ContextContainer> getContextContainers();
/**
* Get the management capabilities of all the registered objects across several cache managers.
*
* @return a map of capabilities, where the key is the alias of the cache manager
*/
Map<Context, Collection<? extends Capability>> getCapabilitiesByContext();
}
| 476 |
695 | <gh_stars>100-1000
#include "kblayout.h"
#include <assert.h>
namespace reindexer {
void KbLayout::GetVariants(const std::wstring& data, std::vector<std::pair<std::wstring, int>>& result) {
std::wstring result_string;
result_string.reserve(data.length());
for (size_t i = 0; i < data.length(); ++i) {
auto sym = data[i];
if (sym >= ruLettersStartUTF16 && sym <= ruLettersStartUTF16 + ruAlfavitSize - 1) { // russian layout
assert(sym >= ruLettersStartUTF16 && sym - ruLettersStartUTF16 < ruAlfavitSize);
result_string.push_back(ru_layout_[sym - ruLettersStartUTF16]);
} else if (sym >= allSymbolStartUTF16 && sym < allSymbolStartUTF16 + engAndAllSymbols) { // en symbol
assert(sym >= allSymbolStartUTF16 && sym - allSymbolStartUTF16 < engAndAllSymbols);
result_string.push_back(all_symbol_[sym - allSymbolStartUTF16]);
} else {
result_string.push_back(sym);
}
}
result.push_back({std::move(result_string), 90});
}
void KbLayout::setEnLayout(wchar_t sym, wchar_t data) {
assert(((sym >= allSymbolStartUTF16) && (sym - allSymbolStartUTF16 < engAndAllSymbols)));
all_symbol_[sym - allSymbolStartUTF16] = data; // '
}
void KbLayout::PrepareEnLayout() {
for (int i = 0; i < engAndAllSymbols; ++i) {
all_symbol_[i] = i + allSymbolStartUTF16;
}
for (int i = 0; i < ruAlfavitSize; ++i) {
setEnLayout(ru_layout_[i], i + ruLettersStartUTF16);
}
}
void KbLayout::PrepareRuLayout() {
ru_layout_[0] = L'f'; //а
ru_layout_[1] = L','; //б
ru_layout_[2] = L'd'; //в
ru_layout_[3] = L'u'; //г
ru_layout_[4] = L'l'; //д
ru_layout_[5] = L't'; //е
ru_layout_[6] = L';'; //ж
ru_layout_[7] = L'p'; //з
ru_layout_[8] = L'b'; //и
ru_layout_[9] = L'q'; //й
ru_layout_[10] = L'r'; //к
ru_layout_[11] = L'k'; //л
ru_layout_[12] = L'v'; //м
ru_layout_[13] = L'y'; //н
ru_layout_[14] = L'j'; //о
ru_layout_[15] = L'g'; //п
ru_layout_[16] = L'h'; //р
ru_layout_[17] = L'c'; //с
ru_layout_[18] = L'n'; //т
ru_layout_[19] = L'e'; //у
ru_layout_[20] = L'a'; //ф
ru_layout_[21] = L'['; //х
ru_layout_[22] = L'w'; //ц
ru_layout_[23] = L'x'; //ч
ru_layout_[24] = L'i'; //ш
ru_layout_[25] = L'o'; //щ
ru_layout_[26] = L']'; //ъ
ru_layout_[27] = L's'; //ы
ru_layout_[28] = L'm'; //ь
ru_layout_[29] = L'\''; //э
ru_layout_[30] = L'.'; //ю
ru_layout_[31] = L'z'; //я
}
KbLayout::KbLayout() {
PrepareRuLayout();
PrepareEnLayout();
}
} // namespace reindexer
| 1,157 |
777 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMEOS_DBUS_SERVICES_CROS_DBUS_SERVICE_H_
#define CHROMEOS_DBUS_SERVICES_CROS_DBUS_SERVICE_H_
#include <memory>
#include <vector>
#include "base/memory/ref_counted.h"
#include "base/threading/platform_thread.h"
#include "chromeos/chromeos_export.h"
namespace dbus {
class Bus;
class ExportedObject;
}
namespace chromeos {
// CrosDBusService is used to run a D-Bus service inside Chrome for Chrome
// OS. The service will be registered as follows:
//
// Service name: org.chromium.LibCrosService (kLibCrosServiceName)
// Object path: chromium/LibCrosService (kLibCrosServicePath)
//
// For historical reasons, the rather irrelevant name "LibCrosService" is
// used in the D-Bus constants such as the service name.
//
// CrosDBusService exports D-Bus methods through service provider classes
// that implement CrosDBusService::ServiceProviderInterface.
class CHROMEOS_EXPORT CrosDBusService {
public:
// CrosDBusService consists of service providers that implement this
// interface.
class ServiceProviderInterface {
public:
// Starts the service provider. |exported_object| is used to export
// D-Bus methods.
virtual void Start(
scoped_refptr<dbus::ExportedObject> exported_object) = 0;
virtual ~ServiceProviderInterface();
};
using ServiceProviderList =
std::vector<std::unique_ptr<ServiceProviderInterface>>;
// Initializes the global instance.
static void Initialize(ServiceProviderList service_providers);
// Destroys the global instance.
static void Shutdown();
protected:
virtual ~CrosDBusService();
private:
friend class CrosDBusServiceTest;
// Initializes the global instance for testing.
static void InitializeForTesting(dbus::Bus* bus,
ServiceProviderList service_providers);
};
} // namespace chromeos
#endif // CHROMEOS_DBUS_SERVICES_CROS_DBUS_SERVICE_H_
| 650 |
2,728 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExtractedPage(Model):
"""Extraction information of a single page in a
with a document.
:param number: Page number.
:type number: int
:param height: Height of the page (in pixels).
:type height: int
:param width: Width of the page (in pixels).
:type width: int
:param cluster_id: Cluster identifier.
:type cluster_id: int
:param key_value_pairs: List of Key-Value pairs extracted from the page.
:type key_value_pairs:
list[~azure.cognitiveservices.formrecognizer.models.ExtractedKeyValuePair]
:param tables: List of Tables and their information extracted from the
page.
:type tables:
list[~azure.cognitiveservices.formrecognizer.models.ExtractedTable]
"""
_attribute_map = {
'number': {'key': 'number', 'type': 'int'},
'height': {'key': 'height', 'type': 'int'},
'width': {'key': 'width', 'type': 'int'},
'cluster_id': {'key': 'clusterId', 'type': 'int'},
'key_value_pairs': {'key': 'keyValuePairs', 'type': '[ExtractedKeyValuePair]'},
'tables': {'key': 'tables', 'type': '[ExtractedTable]'},
}
def __init__(self, **kwargs):
super(ExtractedPage, self).__init__(**kwargs)
self.number = kwargs.get('number', None)
self.height = kwargs.get('height', None)
self.width = kwargs.get('width', None)
self.cluster_id = kwargs.get('cluster_id', None)
self.key_value_pairs = kwargs.get('key_value_pairs', None)
self.tables = kwargs.get('tables', None)
| 728 |
492 | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, <NAME> <<EMAIL>>
# License: http://snmplabs.com/pysnmp/license.html
#
# ASN.1 source file:///usr/share/snmp/mibs/INET-ADDRESS-MIB.txt
# Produced by pysmi-0.4.0 at Thu Feb 14 23:06:46 2019
#
if 'mibBuilder' not in globals():
import sys
sys.stderr.write(__doc__)
sys.exit(1)
(Integer,
OctetString,
ObjectIdentifier) = mibBuilder.importSymbols(
"ASN1",
"Integer",
"OctetString",
"ObjectIdentifier")
(NamedValues,) = mibBuilder.importSymbols(
"ASN1-ENUMERATION",
"NamedValues")
(ConstraintsIntersection,
SingleValueConstraint,
ValueRangeConstraint,
ValueSizeConstraint,
ConstraintsUnion) = mibBuilder.importSymbols(
"ASN1-REFINEMENT",
"ConstraintsIntersection",
"SingleValueConstraint",
"ValueRangeConstraint",
"ValueSizeConstraint",
"ConstraintsUnion")
(ModuleCompliance,
NotificationGroup) = mibBuilder.importSymbols(
"SNMPv2-CONF",
"ModuleCompliance",
"NotificationGroup")
(TimeTicks,
Gauge32,
Integer32,
Counter64,
MibScalar,
MibTable,
MibTableRow,
MibTableColumn,
Bits,
Counter32,
ModuleIdentity,
mib_2,
Unsigned32,
NotificationType,
IpAddress,
iso,
MibIdentifier,
ObjectIdentity) = mibBuilder.importSymbols(
"SNMPv2-SMI",
"TimeTicks",
"Gauge32",
"Integer32",
"Counter64",
"MibScalar",
"MibTable",
"MibTableRow",
"MibTableColumn",
"Bits",
"Counter32",
"ModuleIdentity",
"mib-2",
"Unsigned32",
"NotificationType",
"IpAddress",
"iso",
"MibIdentifier",
"ObjectIdentity")
(TextualConvention,
DisplayString) = mibBuilder.importSymbols(
"SNMPv2-TC",
"TextualConvention",
"DisplayString")
inetAddressMIB = ModuleIdentity(
(1, 3, 6, 1, 2, 1, 76)
)
inetAddressMIB.setRevisions(
("2005-02-04 00:00",
"2002-05-09 00:00",
"2000-06-08 00:00")
)
inetAddressMIB.setLastUpdated("200502040000Z")
if mibBuilder.loadTexts:
inetAddressMIB.setOrganization("""\
IETF Operations and Management Area
""")
inetAddressMIB.setContactInfo("""\
<NAME> (Editor) International University Bremen P.O. Box 750 561
28725 Bremen, Germany Phone: +49 421 200-3587 EMail: <EMAIL>-
<EMAIL> Send comments to <<EMAIL>>.
""")
if mibBuilder.loadTexts:
inetAddressMIB.setDescription("""\
This MIB module defines textual conventions for representing Internet
addresses. An Internet address can be an IPv4 address, an IPv6 address, or a
DNS domain name. This module also defines textual conventions for Internet port
numbers, autonomous system numbers, and the length of an Internet address
prefix. Copyright (C) The Internet Society (2005). This version of this MIB
module is part of RFC 4001, see the RFC itself for full legal notices.
""")
class InetAddressType(TextualConvention, Integer32):
status = "current"
subtypeSpec = Integer32.subtypeSpec
subtypeSpec += ConstraintsUnion(
SingleValueConstraint(
*(0,
1,
2,
3,
4,
16)
)
)
namedValues = NamedValues(
*(("dns", 16),
("ipv4", 1),
("ipv4z", 3),
("ipv6", 2),
("ipv6z", 4),
("unknown", 0))
)
if mibBuilder.loadTexts:
description = """\
A value that represents a type of Internet address. unknown(0) An unknown
address type. This value MUST be used if the value of the corresponding
InetAddress object is a zero-length string. It may also be used to indicate an
IP address that is not in one of the formats defined below. ipv4(1) An IPv4
address as defined by the InetAddressIPv4 textual convention. ipv6(2) An IPv6
address as defined by the InetAddressIPv6 textual convention. ipv4z(3) A non-
global IPv4 address including a zone index as defined by the InetAddressIPv4z
textual convention. ipv6z(4) A non-global IPv6 address including a zone index
as defined by the InetAddressIPv6z textual convention. dns(16) A DNS domain
name as defined by the InetAddressDNS textual convention. Each definition of a
concrete InetAddressType value must be accompanied by a definition of a textual
convention for use with that InetAddressType. To support future extensions, the
InetAddressType textual convention SHOULD NOT be sub-typed in object type
definitions. It MAY be sub-typed in compliance statements in order to require
only a subset of these address types for a compliant implementation.
Implementations must ensure that InetAddressType objects and any dependent
objects (e.g., InetAddress objects) are consistent. An inconsistentValue error
must be generated if an attempt to change an InetAddressType object would, for
example, lead to an undefined InetAddress value. In particular,
InetAddressType/InetAddress pairs must be changed together if the address type
changes (e.g., from ipv6(2) to ipv4(1)).
"""
class InetAddressIPv4(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(4, 4),
)
if mibBuilder.loadTexts:
description = """\
Represents an IPv4 network address: Octets Contents Encoding 1-4 IPv4 address
network-byte order The corresponding InetAddressType value is ipv4(1). This
textual convention SHOULD NOT be used directly in object definitions, as it
restricts addresses to a specific format. However, if it is used, it MAY be
used either on its own or in conjunction with InetAddressType, as a pair.
"""
class InetAddressIPv6(TextualConvention, OctetString):
status = "current"
displayHint = "2x:2x:2x:2x:2x:2x:2x:2x"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(16, 16),
)
if mibBuilder.loadTexts:
description = """\
Represents an IPv6 network address: Octets Contents Encoding 1-16 IPv6 address
network-byte order The corresponding InetAddressType value is ipv6(2). This
textual convention SHOULD NOT be used directly in object definitions, as it
restricts addresses to a specific format. However, if it is used, it MAY be
used either on its own or in conjunction with InetAddressType, as a pair.
"""
class InetAddressIPv4z(TextualConvention, OctetString):
status = "current"
displayHint = "1d.1d.1d.1d%4d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(8, 8),
)
if mibBuilder.loadTexts:
description = """\
Represents a non-global IPv4 network address, together with its zone index:
Octets Contents Encoding 1-4 IPv4 address network-byte order 5-8 zone index
network-byte order The corresponding InetAddressType value is ipv4z(3). The
zone index (bytes 5-8) is used to disambiguate identical address values on
nodes that have interfaces attached to different zones of the same scope. The
zone index may contain the special value 0, which refers to the default zone
for each scope. This textual convention SHOULD NOT be used directly in object
definitions, as it restricts addresses to a specific format. However, if it is
used, it MAY be used either on its own or in conjunction with InetAddressType,
as a pair.
"""
class InetAddressIPv6z(TextualConvention, OctetString):
status = "current"
displayHint = "2x:2x:2x:2x:2x:2x:2x:2x%4d"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(20, 20),
)
if mibBuilder.loadTexts:
description = """\
Represents a non-global IPv6 network address, together with its zone index:
Octets Contents Encoding 1-16 IPv6 address network-byte order 17-20 zone index
network-byte order The corresponding InetAddressType value is ipv6z(4). The
zone index (bytes 17-20) is used to disambiguate identical address values on
nodes that have interfaces attached to different zones of the same scope. The
zone index may contain the special value 0, which refers to the default zone
for each scope. This textual convention SHOULD NOT be used directly in object
definitions, as it restricts addresses to a specific format. However, if it is
used, it MAY be used either on its own or in conjunction with InetAddressType,
as a pair.
"""
class InetAddressDNS(TextualConvention, OctetString):
status = "current"
displayHint = "255a"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(1, 255),
)
if mibBuilder.loadTexts:
description = """\
Represents a DNS domain name. The name SHOULD be fully qualified whenever
possible. The corresponding InetAddressType is dns(16). The DESCRIPTION clause
of InetAddress objects that may have InetAddressDNS values MUST fully describe
how (and when) these names are to be resolved to IP addresses. The resolution
of an InetAddressDNS value may require to query multiple DNS records (e.g., A
for IPv4 and AAAA for IPv6). The order of the resolution process and which DNS
record takes precedence depends on the configuration of the resolver. This
textual convention SHOULD NOT be used directly in object definitions, as it
restricts addresses to a specific format. However, if it is used, it MAY be
used either on its own or in conjunction with InetAddressType, as a pair.
"""
class InetAddress(TextualConvention, OctetString):
status = "current"
subtypeSpec = OctetString.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueSizeConstraint(0, 255),
)
if mibBuilder.loadTexts:
description = """\
Denotes a generic Internet address. An InetAddress value is always interpreted
within the context of an InetAddressType value. Every usage of the InetAddress
textual convention is required to specify the InetAddressType object that
provides the context. It is suggested that the InetAddressType object be
logically registered before the object(s) that use the InetAddress textual
convention, if they appear in the same logical row. The value of an InetAddress
object must always be consistent with the value of the associated
InetAddressType object. Attempts to set an InetAddress object to a value
inconsistent with the associated InetAddressType must fail with an
inconsistentValue error. When this textual convention is used as the syntax of
an index object, there may be issues with the limit of 128 sub-identifiers
specified in SMIv2, STD 58. In this case, the object definition MUST include a
'SIZE' clause to limit the number of potential instance sub-identifiers;
otherwise the applicable constraints MUST be stated in the appropriate
conceptual row DESCRIPTION clauses, or in the surrounding documentation if
there is no single DESCRIPTION clause that is appropriate.
"""
# https://tools.ietf.org/html/rfc4001#section-4.1
TYPE_MAP = {
InetAddressType.namedValues.getValue("ipv4"): InetAddressIPv4(),
InetAddressType.namedValues.getValue("ipv6"): InetAddressIPv6(),
InetAddressType.namedValues.getValue("ipv4z"): InetAddressIPv4z(),
InetAddressType.namedValues.getValue("ipv6z"): InetAddressIPv6z(),
InetAddressType.namedValues.getValue("dns"): InetAddressDNS()
}
@classmethod
def cloneFromName(cls, value, impliedFlag, parentRow, parentIndices):
for parentIndex in reversed(parentIndices):
if isinstance(parentIndex, InetAddressType):
try:
return parentRow.oidToValue(
cls.TYPE_MAP[int(parentIndex)], value, impliedFlag, parentIndices)
except KeyError:
pass
raise error.SmiError('%s object encountered without preceding '
'InetAddressType-like index: %r' % (cls.__name__, value))
def cloneAsName(self, impliedFlag, parentRow, parentIndices):
for parentIndex in reversed(parentIndices):
if isinstance(parentIndex, InetAddressType):
try:
return parentRow.valueToOid(
self.TYPE_MAP[int(parentIndex)].clone(
self.asOctets().decode('ascii')), impliedFlag, parentIndices)
except KeyError:
pass
raise error.SmiError('%s object encountered without preceding '
'InetAddressType-like index: %r' % (self.__class__.__name__, self))
class InetAddressPrefixLength(TextualConvention, Unsigned32):
status = "current"
displayHint = "d"
subtypeSpec = Unsigned32.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueRangeConstraint(0, 2040),
)
if mibBuilder.loadTexts:
description = """\
Denotes the length of a generic Internet network address prefix. A value of n
corresponds to an IP address mask that has n contiguous 1-bits from the most
significant bit (MSB), with all other bits set to 0. An InetAddressPrefixLength
value is always interpreted within the context of an InetAddressType value.
Every usage of the InetAddressPrefixLength textual convention is required to
specify the InetAddressType object that provides the context. It is suggested
that the InetAddressType object be logically registered before the object(s)
that use the InetAddressPrefixLength textual convention, if they appear in the
same logical row. InetAddressPrefixLength values larger than the maximum length
of an IP address for a specific InetAddressType are treated as the maximum
significant value applicable for the InetAddressType. The maximum significant
value is 32 for the InetAddressType 'ipv4(1)' and 'ipv4z(3)' and 128 for the
InetAddressType 'ipv6(2)' and 'ipv6z(4)'. The maximum significant value for the
InetAddressType 'dns(16)' is 0. The value zero is object-specific and must be
defined as part of the description of any object that uses this syntax.
Examples of the usage of zero might include situations where the Internet
network address prefix is unknown or does not apply. The upper bound of the
prefix length has been chosen to be consistent with the maximum size of an
InetAddress.
"""
class InetPortNumber(TextualConvention, Unsigned32):
status = "current"
displayHint = "d"
subtypeSpec = Unsigned32.subtypeSpec
subtypeSpec += ConstraintsUnion(
ValueRangeConstraint(0, 65535),
)
if mibBuilder.loadTexts:
description = """\
Represents a 16 bit port number of an Internet transport layer protocol. Port
numbers are assigned by IANA. A current list of all assignments is available
from <http://www.iana.org/>. The value zero is object-specific and must be
defined as part of the description of any object that uses this syntax.
Examples of the usage of zero might include situations where a port number is
unknown, or when the value zero is used as a wildcard in a filter.
"""
class InetAutonomousSystemNumber(TextualConvention, Unsigned32):
status = "current"
displayHint = "d"
if mibBuilder.loadTexts:
description = """\
Represents an autonomous system number that identifies an Autonomous System
(AS). An AS is a set of routers under a single technical administration, using
an interior gateway protocol and common metrics to route packets within the AS,
and using an exterior gateway protocol to route packets to other ASes'. IANA
maintains the AS number space and has delegated large parts to the regional
registries. Autonomous system numbers are currently limited to 16 bits
(0..65535). There is, however, work in progress to enlarge the autonomous
system number space to 32 bits. Therefore, this textual convention uses an
Unsigned32 value without a range restriction in order to support a larger
autonomous system number space.
"""
class InetScopeType(TextualConvention, Integer32):
status = "current"
subtypeSpec = Integer32.subtypeSpec
subtypeSpec += ConstraintsUnion(
SingleValueConstraint(
*(1,
2,
3,
4,
5,
8,
14)
)
)
namedValues = NamedValues(
*(("adminLocal", 4),
("global", 14),
("interfaceLocal", 1),
("linkLocal", 2),
("organizationLocal", 8),
("siteLocal", 5),
("subnetLocal", 3))
)
if mibBuilder.loadTexts:
description = """\
Represents a scope type. This textual convention can be used in cases where a
MIB has to represent different scope types and there is no context information,
such as an InetAddress object, that implicitly defines the scope type. Note
that not all possible values have been assigned yet, but they may be assigned
in future revisions of this specification. Applications should therefore be
able to deal with values not yet assigned.
"""
class InetZoneIndex(TextualConvention, Unsigned32):
status = "current"
displayHint = "d"
if mibBuilder.loadTexts:
description = """\
A zone index identifies an instance of a zone of a specific scope. The zone
index MUST disambiguate identical address values. For link-local addresses, the
zone index will typically be the interface index (ifIndex as defined in the IF-
MIB) of the interface on which the address is configured. The zone index may
contain the special value 0, which refers to the default zone. The default zone
may be used in cases where the valid zone index is not known (e.g., when a
management application has to write a link-local IPv6 address without knowing
the interface index value). The default zone SHOULD NOT be used as an easy way
out in cases where the zone index for a non-global IPv6 address is known.
"""
class InetVersion(TextualConvention, Integer32):
status = "current"
subtypeSpec = Integer32.subtypeSpec
subtypeSpec += ConstraintsUnion(
SingleValueConstraint(
*(0,
1,
2)
)
)
namedValues = NamedValues(
*(("ipv4", 1),
("ipv6", 2),
("unknown", 0))
)
if mibBuilder.loadTexts:
description = """\
A value representing a version of the IP protocol. unknown(0) An unknown or
unspecified version of the IP protocol. ipv4(1) The IPv4 protocol as defined in
RFC 791 (STD 5). ipv6(2) The IPv6 protocol as defined in RFC 2460. Note that
this textual convention SHOULD NOT be used to distinguish different address
types associated with IP protocols. The InetAddressType has been designed for
this purpose.
"""
mibBuilder.exportSymbols(
"INET-ADDRESS-MIB",
**{"InetAddressType": InetAddressType,
"InetAddress": InetAddress,
"InetAddressIPv4": InetAddressIPv4,
"InetAddressIPv6": InetAddressIPv6,
"InetAddressIPv4z": InetAddressIPv4z,
"InetAddressIPv6z": InetAddressIPv6z,
"InetAddressDNS": InetAddressDNS,
"InetAddressPrefixLength": InetAddressPrefixLength,
"InetPortNumber": InetPortNumber,
"InetAutonomousSystemNumber": InetAutonomousSystemNumber,
"InetScopeType": InetScopeType,
"InetZoneIndex": InetZoneIndex,
"InetVersion": InetVersion,
"inetAddressMIB": inetAddressMIB}
)
| 6,511 |
5,279 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sql.impl.udf;
import static java.nio.charset.StandardCharsets.UTF_8;
import org.apache.beam.repackaged.core.org.apache.commons.lang3.ArrayUtils;
import org.apache.beam.sdk.extensions.sql.BeamSqlDslBase;
import org.apache.beam.sdk.extensions.sql.SqlTransform;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.schemas.Schema.FieldType;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.Row;
import org.apache.commons.codec.digest.DigestUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for UDFs. */
@RunWith(JUnit4.class)
public class BeamSalUhfSpecialTypeAndValueTest extends BeamSqlDslBase {
@Test
public void testIsInf() throws Exception {
Schema resultType =
Schema.builder()
.addBooleanField("field_1")
.addBooleanField("field_2")
.addBooleanField("field_3")
.addBooleanField("field_4")
.build();
Row resultRow = Row.withSchema(resultType).addValues(true, true, true, true).build();
String sql =
"SELECT IS_INF(f_float_1), IS_INF(f_double_1), IS_INF(f_float_2), IS_INF(f_double_2) FROM PCOLLECTION";
PCollection<Row> result = boundedInputFloatDouble.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow);
pipeline.run().waitUntilFinish();
}
@Test
public void testIsNan() throws Exception {
Schema resultType =
Schema.builder()
.addBooleanField("field_1")
.addBooleanField("field_2")
.addBooleanField("field_3")
.addBooleanField("field_4")
.build();
Row resultRow = Row.withSchema(resultType).addValues(false, false, true, true).build();
String sql =
"SELECT IS_NAN(f_float_2), IS_NAN(f_double_2), IS_NAN(f_float_3), IS_NAN(f_double_3) FROM PCOLLECTION";
PCollection<Row> result = boundedInputFloatDouble.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow);
pipeline.run().waitUntilFinish();
}
@Test
public void testLength() throws Exception {
Schema resultType = Schema.builder().addInt64Field("field").build();
Row resultRow = Row.withSchema(resultType).addValues(10L).build();
Row resultRow2 = Row.withSchema(resultType).addValues(0L).build();
Row resultRow3 = Row.withSchema(resultType).addValues(2L).build();
String sql = "SELECT LENGTH(f_bytes) FROM PCOLLECTION WHERE f_func = 'LENGTH'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3);
pipeline.run().waitUntilFinish();
}
@Test
public void testReverse() throws Exception {
byte[] testByets = "абвгд".getBytes(UTF_8);
ArrayUtils.reverse(testByets);
Schema resultType = Schema.builder().addByteArrayField("field").build();
Row resultRow = Row.withSchema(resultType).addValues(testByets).build();
Row resultRow2 = Row.withSchema(resultType).addValues("\1\0".getBytes(UTF_8)).build();
Row resultRow3 = Row.withSchema(resultType).addValues("".getBytes(UTF_8)).build();
String sql = "SELECT REVERSE(f_bytes) FROM PCOLLECTION WHERE f_func = 'LENGTH'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3);
pipeline.run().waitUntilFinish();
}
@Test
public void testToHex() throws Exception {
Schema resultType = Schema.builder().addStringField("field").build();
Row resultRow = Row.withSchema(resultType).addValue("666f6f626172").build();
Row resultRow2 = Row.withSchema(resultType).addValue("20").build();
Row resultRow3 = Row.withSchema(resultType).addValue("616263414243").build();
Row resultRow4 =
Row.withSchema(resultType).addValue("616263414243d0b6d189d184d096d0a9d0a4").build();
String sql = "SELECT TO_HEX(f_bytes) FROM PCOLLECTION WHERE f_func = 'TO_HEX'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow, resultRow2, resultRow3, resultRow4);
pipeline.run().waitUntilFinish();
}
@Test
public void testLeftPad() throws Exception {
Schema resultType = Schema.builder().addNullableField("field", FieldType.BYTES).build();
Row resultRow = Row.withSchema(resultType).addValue("".getBytes(UTF_8)).build();
Row resultRow2 = Row.withSchema(resultType).addValue("abcdef".getBytes(UTF_8)).build();
Row resultRow3 = Row.withSchema(resultType).addValue("abcd".getBytes(UTF_8)).build();
Row resultRow4 = Row.withSchema(resultType).addValue("defgabcdef".getBytes(UTF_8)).build();
Row resultRow5 = Row.withSchema(resultType).addValue("defghdeabc".getBytes(UTF_8)).build();
Row resultRow6 = Row.withSchema(resultType).addValue("----abc".getBytes(UTF_8)).build();
Row resultRow7 = Row.withSchema(resultType).addValue("defdefd".getBytes(UTF_8)).build();
Row resultRow8 = Row.withSchema(resultType).addValue(null).build();
String sql = "SELECT LPAD(f_bytes_one, length, f_bytes_two) FROM PCOLLECTION";
PCollection<Row> result =
boundedInputBytesPaddingTest.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result)
.containsInAnyOrder(
resultRow,
resultRow2,
resultRow3,
resultRow4,
resultRow5,
resultRow6,
resultRow7,
resultRow8);
pipeline.run().waitUntilFinish();
}
@Test
public void testRightPad() throws Exception {
Schema resultType = Schema.builder().addNullableField("field", FieldType.BYTES).build();
Row resultRow = Row.withSchema(resultType).addValue("".getBytes(UTF_8)).build();
Row resultRow2 = Row.withSchema(resultType).addValue("abcdef".getBytes(UTF_8)).build();
Row resultRow3 = Row.withSchema(resultType).addValue("abcd".getBytes(UTF_8)).build();
Row resultRow4 = Row.withSchema(resultType).addValue("abcdefdefg".getBytes(UTF_8)).build();
Row resultRow5 = Row.withSchema(resultType).addValue("abcdefghde".getBytes(UTF_8)).build();
Row resultRow6 = Row.withSchema(resultType).addValue("abc----".getBytes(UTF_8)).build();
Row resultRow7 = Row.withSchema(resultType).addValue("defdefd".getBytes(UTF_8)).build();
Row resultRow8 = Row.withSchema(resultType).addValue(null).build();
String sql = "SELECT RPAD(f_bytes_one, length, f_bytes_two) FROM PCOLLECTION";
PCollection<Row> result =
boundedInputBytesPaddingTest.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result)
.containsInAnyOrder(
resultRow,
resultRow2,
resultRow3,
resultRow4,
resultRow5,
resultRow6,
resultRow7,
resultRow8);
pipeline.run().waitUntilFinish();
}
@Test
public void testMd5() throws Exception {
Schema resultType = Schema.builder().addByteArrayField("field").build();
Row resultRow1 =
Row.withSchema(resultType).addValues(DigestUtils.md5("foobar".getBytes(UTF_8))).build();
Row resultRow2 =
Row.withSchema(resultType).addValues(DigestUtils.md5(" ".getBytes(UTF_8))).build();
Row resultRow3 =
Row.withSchema(resultType)
.addValues(DigestUtils.md5("abcABCжщфЖЩФ".getBytes(UTF_8)))
.build();
String sql = "SELECT MD5(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
pipeline.run().waitUntilFinish();
}
@Test
public void testSHA1() throws Exception {
Schema resultType = Schema.builder().addByteArrayField("field").build();
Row resultRow1 =
Row.withSchema(resultType).addValues(DigestUtils.sha1("foobar".getBytes(UTF_8))).build();
Row resultRow2 =
Row.withSchema(resultType).addValues(DigestUtils.sha1(" ".getBytes(UTF_8))).build();
Row resultRow3 =
Row.withSchema(resultType)
.addValues(DigestUtils.sha1("abcABCжщфЖЩФ".getBytes(UTF_8)))
.build();
String sql = "SELECT SHA1(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
pipeline.run().waitUntilFinish();
}
@Test
public void testSHA256() throws Exception {
Schema resultType = Schema.builder().addByteArrayField("field").build();
Row resultRow1 =
Row.withSchema(resultType).addValues(DigestUtils.sha256("foobar".getBytes(UTF_8))).build();
Row resultRow2 =
Row.withSchema(resultType).addValues(DigestUtils.sha256(" ".getBytes(UTF_8))).build();
Row resultRow3 =
Row.withSchema(resultType)
.addValues(DigestUtils.sha256("abcABCжщфЖЩФ".getBytes(UTF_8)))
.build();
String sql = "SELECT SHA256(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
pipeline.run().waitUntilFinish();
}
@Test
public void testSHA512() throws Exception {
Schema resultType = Schema.builder().addByteArrayField("field").build();
Row resultRow1 =
Row.withSchema(resultType).addValues(DigestUtils.sha512("foobar".getBytes(UTF_8))).build();
Row resultRow2 =
Row.withSchema(resultType).addValues(DigestUtils.sha512(" ".getBytes(UTF_8))).build();
Row resultRow3 =
Row.withSchema(resultType)
.addValues(DigestUtils.sha512("abcABCжщфЖЩФ".getBytes(UTF_8)))
.build();
String sql = "SELECT SHA512(f_bytes) FROM PCOLLECTION WHERE f_func = 'HashingFn'";
PCollection<Row> result = boundedInputBytes.apply("testUdf", SqlTransform.query(sql));
PAssert.that(result).containsInAnyOrder(resultRow1, resultRow2, resultRow3);
pipeline.run().waitUntilFinish();
}
}
| 4,306 |
555 | <filename>proxypool-web/src/main/java/com/cv4j/proxy/web/dao/ProxyDao.java
package com.cv4j.proxy.web.dao;
import com.cv4j.proxy.web.domain.ProxyData;
import com.cv4j.proxy.web.dto.PageResult;
import com.cv4j.proxy.web.dto.ProxyDataDTO;
import com.cv4j.proxy.web.dto.QueryProxyDTO;
import java.util.List;
import java.util.Map;
/**
* Created by tony on 2017/11/16.
*/
/**
 * Data-access contract for stored proxy entries: persistence, conditional
 * lookup, limited retrieval, per-id update/delete and bulk removal.
 */
public interface ProxyDao {
    /** Persists one proxy record; returns true on success. */
    boolean saveProxy(ProxyData proxyData);
    /**
     * Finds proxies matching the query conditions.
     * When {@code isGetAll} is true, presumably all matches are returned
     * instead of a single page — confirm against the implementation.
     */
    List<ProxyData> findProxyByCond(QueryProxyDTO queryProxyDTO, boolean isGetAll);
    /** Returns at most {@code count} proxy DTOs. */
    List<ProxyDataDTO> findLimitProxy(int count);
    /** Updates the proxy identified by {@code id}; returns true on success. */
    boolean updateProxyById(String id);
    /** Deletes the proxy identified by {@code id}; returns true on success. */
    boolean deleteProxyById(String id);
    /** Removes every stored proxy record. */
    void deleteAll();
    /** NOTE(review): raw {@code Class} — presumably should be {@code Class<?>}; tightening would affect implementers, so confirm first. */
    Map<String, Class> getProxyMap();
    /** Returns ten randomly chosen proxies. */
    List<ProxyData> takeRandomTenProxy();
}
| 328 |
6,989 | <reponame>jochenater/catboost
#include "quantized_features_info.h"
#include <catboost/libs/cat_feature/cat_feature.h>
#include <library/cpp/dbg_output/dump.h>
#include <util/generic/cast.h>
#include <util/generic/ptr.h>
#include <util/generic/xrange.h>
#include <util/stream/file.h>
#include <util/system/rwlock.h>
#include <util/system/yassert.h>
using namespace NCB;
// Builds a TQuantizedFeaturesInfo for the given feature layout with no
// ignored features and default binarization options.
TQuantizedFeaturesInfoPtr MakeQuantizedFeaturesInfo(
    const TFeaturesLayout& featuresLayout
) {
    return MakeIntrusive<TQuantizedFeaturesInfo>(
        featuresLayout,
        /*ignoredFeatures*/ TConstArrayRef<ui32>(),
        NCatboostOptions::TBinarizationOptions()
    );
}
// Builds a TQuantizedFeaturesInfo for `featureCount` estimated (float)
// features, assigning each one the same fixed 255-element border vector.
TQuantizedFeaturesInfoPtr MakeEstimatedQuantizedFeaturesInfo(i32 featureCount) {
    /* In fact they are 1/256, 2/256 ... 255/256 but they are not really used now so they are left
     * constant for simplicity
     */
    static const TVector<float> STANDARD_BORDERS(255, 1.0f);

    TFeaturesLayout estimatedFeaturesLayout(featureCount);
    auto estimatedQuantizedFeaturesInfo = MakeIntrusive<TQuantizedFeaturesInfo>();
    estimatedQuantizedFeaturesInfo->Init(&estimatedFeaturesLayout);
    for (auto featureIdx : xrange(SafeIntegerCast<ui32>(featureCount))) {
        // Each feature gets its own copy since SetBorders takes the vector by value/move.
        estimatedQuantizedFeaturesInfo->SetBorders(
            TFloatFeatureIdx(featureIdx),
            TVector<float>(STANDARD_BORDERS)
        );
    }
    return estimatedQuantizedFeaturesInfo;
}
// Rebuilds the categorical-feature perfect hashes from per-feature unique
// value counts. Values are assumed to be the integers "0".."n-1" (hashed via
// CalcCatFeatureHash). During initialization, features with a single unique
// value are ignored rather than hashed.
void UpdateCatFeaturesInfo(
    TConstArrayRef<i32> catFeaturesUniqValueCounts,
    bool isInitialization,
    NCB::TQuantizedFeaturesInfo* quantizedFeaturesInfo
) {
    TVector<ui32> integerValueHashes; // hashes for "0", "1" ... etc.
    auto& featuresLayout = *(quantizedFeaturesInfo->GetFeaturesLayout());
    featuresLayout.IterateOverAvailableFeatures<EFeatureType::Categorical>(
        [&] (TCatFeatureIdx catFeatureIdx) {
            auto flatFeatureIdx = featuresLayout.GetExternalFeatureIdx(
                *catFeatureIdx,
                EFeatureType::Categorical
            );
            i32 uniqValuesCount = catFeaturesUniqValueCounts[flatFeatureIdx];
            Y_ASSERT(uniqValuesCount > 0);
            // Extend the shared hash cache lazily so each integer is hashed only once.
            if ((size_t)uniqValuesCount >= integerValueHashes.size()) {
                for (auto i : xrange((ui32)integerValueHashes.size(), (ui32)uniqValuesCount)) {
                    integerValueHashes.push_back(CalcCatFeatureHash(ToString(i)));
                }
            }
            TCatFeaturePerfectHash catFeaturePerfectHash;
            for (auto i : xrange((ui32)uniqValuesCount)) {
                catFeaturePerfectHash.Map.emplace(integerValueHashes[i], TValueWithCount{i, 1});
            }
            {
                // Writes to the shared info are serialized behind its RW mutex.
                TWriteGuard guard(quantizedFeaturesInfo->GetRWMutex());
                if (isInitialization && (uniqValuesCount == 1)) {
                    // it is safe - we've already got to this element in iteration
                    featuresLayout.IgnoreExternalFeature(flatFeatureIdx);
                } else {
                    quantizedFeaturesInfo->UpdateCategoricalFeaturesPerfectHash(
                        catFeatureIdx,
                        std::move(catFeaturePerfectHash)
                    );
                }
            }
        }
    );
}
// Thin i32 wrapper over the member method of the same name (for interop callers).
i32 CalcMaxCategoricalFeaturesUniqueValuesCountOnLearn(
    const TQuantizedFeaturesInfo& quantizedFeaturesInfo
) {
    return SafeIntegerCast<i32>(quantizedFeaturesInfo.CalcMaxCategoricalFeaturesUniqueValuesCountOnLearn());
}
// Returns a vector indexed by external (flat) feature index; entries for
// categorical features hold their unique value count (OnAll), all other
// entries stay 0.
TVector<i32> GetCategoricalFeaturesUniqueValuesCounts(
    const NCB::TQuantizedFeaturesInfo& quantizedFeaturesInfo
) {
    const auto& featuresLayout = *(quantizedFeaturesInfo.GetFeaturesLayout());
    TVector<i32> catFeaturesUniqueValuesCounts(featuresLayout.GetExternalFeatureCount(), 0);
    featuresLayout.IterateOverAvailableFeatures<EFeatureType::Categorical>(
        [&] (TCatFeatureIdx catFeatureIdx) {
            auto flatFeatureIdx = featuresLayout.GetExternalFeatureIdx(
                *catFeatureIdx,
                EFeatureType::Categorical
            );
            catFeaturesUniqueValuesCounts[flatFeatureIdx]
                = quantizedFeaturesInfo.GetUniqueValuesCounts(catFeatureIdx).OnAll;
        }
    );
    return catFeaturesUniqueValuesCounts;
}
// Debug helper: writes the DbgDump representation of the info to `fileName`.
void DbgDump(const NCB::TQuantizedFeaturesInfo& quantizedFeaturesInfo, const TString& fileName) {
    TFileOutput out(fileName);
    out << DbgDump(quantizedFeaturesInfo);
}
| 1,812 |
5,169 | <filename>Specs/2/3/4/AFOFFMpegLib/0.0.22/AFOFFMpegLib.podspec.json<gh_stars>1000+
{
"name": "AFOFFMpegLib",
"version": "0.0.22",
"summary": "decoding.",
"description": "Use soft decode to decode video.",
"homepage": "https://github.com/PangDuTechnology/AFOFFMpegLib.git",
"license": "MIT",
"authors": {
"PangDu": "<EMAIL>"
},
"platforms": {
"ios": "8.0"
},
"source": {
"git": "https://github.com/PangDuTechnology/AFOFFMpegLib.git",
"tag": "0.0.22"
},
"header_mappings_dir": "FFmpeg/include",
"preserve_paths": "FFmpeg/**/**",
"vendored_libraries": [
"FFmpeg/lib/libavcodec.a",
"FFmpeg/lib/libavformat.a",
"FFmpeg/lib/libavutil.a",
"FFmpeg/lib/libswscale.a",
"FFmpeg/lib/libswresample.a"
],
"frameworks": [
"CoreMedia",
"AVFoundation",
"AudioToolbox",
"VideoToolbox"
],
"libraries": [
"c",
"c++",
"z",
"iconv",
"bz2"
],
"requires_arc": true,
"static_framework": true,
"xcconfig": {
"HEADER_SEARCH_PATHS": "\"$(SDKROOT)/FFmpeg/include/**/*.h\"",
"LIBRARY_SEARCH_PATHS": "\"$(SDKROOT)/FFmpeg/lib\""
},
"subspecs": [
{
"name": "include",
"subspecs": [
{
"name": "libavcodec",
"source_files": "FFmpeg/include/libavcodec/*.h",
"public_header_files": "FFmpeg/include/libavcodec/*.h"
},
{
"name": "libavformat",
"source_files": "FFmpeg/include/libavformat/*.h",
"public_header_files": "FFmpeg/include/libavformat/*.h"
},
{
"name": "libavutil",
"source_files": "FFmpeg/include/libavutil/*.h",
"public_header_files": "FFmpeg/include/libavutil/*.h"
},
{
"name": "libswresample",
"source_files": "FFmpeg/include/libswresample/*.h",
"public_header_files": "FFmpeg/include/libswresample/*.h"
},
{
"name": "libswscale",
"source_files": "FFmpeg/include/libswscale/*.h",
"public_header_files": "FFmpeg/include/libswscale/*.h"
}
]
}
]
}
| 1,069 |
439 | import java.math.BigInteger;
import java.util.stream.IntStream;
/**
 * Wheat-and-chessboard problem: square n of a 64-square board holds 2^(n-1)
 * grains.
 */
class Grains {

    /**
     * Number of grains on one square.
     *
     * @param square board square, 1..64 inclusive
     * @return 2^(square - 1)
     * @throws IllegalArgumentException if square is outside 1..64
     */
    BigInteger grainsOnSquare(final int square) {
        if (square < 1 || square > 64) {
            throw new IllegalArgumentException("square must be between 1 and 64");
        }
        // 2^(k-1) is a single bit shift; cheaper and clearer than valueOf(2).pow(k-1).
        return BigInteger.ONE.shiftLeft(square - 1);
    }

    /**
     * Total number of grains on the whole board.
     *
     * @return 2^64 - 1 — the closed form of the geometric series, replacing the
     *     previous 64-term stream summation
     */
    BigInteger grainsOnBoard() {
        return BigInteger.ONE.shiftLeft(64).subtract(BigInteger.ONE);
    }
}
| 289 |
1,433 | /*
* //******************************************************************
* //
* // Copyright 2015 Intel Corporation.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
* //
* // Licensed under the Apache License, Version 2.0 (the "License");
* // you may not use this file except in compliance with the License.
* // You may obtain a copy of the License at
* //
* // http://www.apache.org/licenses/LICENSE-2.0
* //
* // Unless required by applicable law or agreed to in writing, software
* // distributed under the License is distributed on an "AS IS" BASIS,
* // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* // See the License for the specific language governing permissions and
* // limitations under the License.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
*/
package org.iotivity.base;
/**
 * Observe registration actions and their integer wire values.
 */
public enum ObserveAction {
    /** Register an observation (wire value 0). */
    REGISTER(0),
    /** Cancel an observation (wire value 1). */
    UNREGISTER(1);

    private final int value;

    private ObserveAction(int value) {
        this.value = value;
    }

    /** @return the integer wire value of this action */
    public int getValue() {
        return this.value;
    }

    /**
     * Resolves an integer wire value to its action.
     *
     * @param val value to resolve
     * @return the matching {@code ObserveAction}
     * @throws IllegalArgumentException if no action has this value; the message
     *     now includes the offending value to ease debugging
     */
    public static ObserveAction get(int val) {
        for (ObserveAction observeAction : ObserveAction.values()) {
            if (observeAction.getValue() == val) {
                return observeAction;
            }
        }
        throw new IllegalArgumentException("Unexpected ObserveAction value: " + val);
    }
}
| 431 |
5,169 | {
"name": "NavKit",
"version": "0.2",
"license": "MIT",
"summary": "Simple and integrated way to customize navigation bar experience on iOS app.",
"homepage": "https://github.com/wilbertliu/NavKit",
"social_media_url": "https://twitter.com/wilbertliu",
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/wilbertliu/NavKit.git",
"tag": "0.2"
},
"platforms": {
"ios": "8.0"
},
"source_files": [
"NavKit",
"NavKit/**/*.{h,m,swift}"
],
"pushed_with_swift_version": "3.0"
}
| 238 |
1,273 | <gh_stars>1000+
package org.broadinstitute.hellbender.tools.copynumber.formats.records;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.Utils;
/**
 * A {@link LegacySegment} carrying, in addition, the copy-ratio call made for
 * the segment. Immutable once constructed; the call may never be null.
 */
public class CalledLegacySegment extends LegacySegment {
    private final CalledCopyRatioSegment.Call call;

    public CalledLegacySegment(final String sampleName, final SimpleInterval interval, final int numProbes,
                               final double segmentMean,
                               final CalledCopyRatioSegment.Call call) {
        super(sampleName, interval, numProbes, segmentMean);
        Utils.nonNull(call, "Cannot initialize a called legacy segment with a null call.");
        this.call = call;
    }

    /** @return the copy-ratio call for this segment (never null) */
    public CalledCopyRatioSegment.Call getCall() {
        return call;
    }

    @Override
    public final boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        // Same concrete class, equal base-segment state, then compare the calls.
        if (o == null || getClass() != o.getClass() || !super.equals(o)) {
            return false;
        }
        return call == ((CalledLegacySegment) o).call;
    }

    @Override
    public final int hashCode() {
        // Combine the superclass hash with the call's, mirroring equals().
        return 31 * super.hashCode() + call.hashCode();
    }

    @Override
    public final String toString() {
        return new StringBuilder("CalledLegacySegment{")
                .append("interval=").append(getInterval())
                .append(", numPoints=").append(getNumProbes())
                .append(", meanLog2CopyRatio=").append(getSegmentMean())
                .append(", call=").append(call)
                .append('}')
                .toString();
    }
}
| 739 |
1,615 | //
// Created by Xiong.Fangyu 2019/06/03.
//
#ifndef __M_MEM_H
#define __M_MEM_H
#include <stdlib.h>
/**
 * Entry point for allocating and freeing memory. With J_API_INFO and
 * MEM_INFO enabled, calls through this function record memory usage and
 * call-stack information, which makes memory leaks easier to track down.
 * ns > 0  : malloc or realloc memory
 * ns == 0 : free src
 */
void * m_malloc(void* src, size_t os, size_t ns);
/// Reference implementation (realloc-based). The old-size parameter `os`
/// is unused here; presumably it exists for bookkeeping builds — confirm.
void * m_malloc(void* src, size_t os, size_t ns) {
    if (ns == 0) {
        // Per the contract above, ns == 0 means "free src and return NULL".
        free(src);
        return NULL;
    }
    // NOTE(review): if realloc fails it returns NULL while src stays
    // allocated — callers that discard src on NULL would leak it; confirm.
    void * nb = realloc(src, ns);
    return nb;
}
#endif | 312 |
1,350 | <filename>sdk/resourcehealth/azure-resourcemanager-resourcehealth/src/main/java/com/azure/resourcemanager/resourcehealth/models/EventPropertiesRecommendedActions.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.resourcehealth.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/** Recommended actions of event. */
@Fluent
public final class EventPropertiesRecommendedActions {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(EventPropertiesRecommendedActions.class);

    /*
     * Recommended action title for the service health event.
     */
    @JsonProperty(value = "message")
    private String message;

    /*
     * Recommended actions for the service health event.
     */
    @JsonProperty(value = "actions")
    private List<EventPropertiesRecommendedActionsItem> actions;

    /*
     * Recommended action locale for the service health event.
     */
    @JsonProperty(value = "localeCode")
    private String localeCode;

    /**
     * Returns the message property: the recommended action title for the service health event.
     *
     * @return the message value.
     */
    public String message() {
        return message;
    }

    /**
     * Sets the message property: the recommended action title for the service health event.
     *
     * @param message the message value to set.
     * @return the EventPropertiesRecommendedActions object itself.
     */
    public EventPropertiesRecommendedActions withMessage(String message) {
        this.message = message;
        return this;
    }

    /**
     * Returns the actions property: the recommended actions for the service health event.
     *
     * @return the actions value.
     */
    public List<EventPropertiesRecommendedActionsItem> actions() {
        return actions;
    }

    /**
     * Sets the actions property: the recommended actions for the service health event.
     *
     * @param actions the actions value to set.
     * @return the EventPropertiesRecommendedActions object itself.
     */
    public EventPropertiesRecommendedActions withActions(List<EventPropertiesRecommendedActionsItem> actions) {
        this.actions = actions;
        return this;
    }

    /**
     * Returns the localeCode property: the recommended action locale for the service health event.
     *
     * @return the localeCode value.
     */
    public String localeCode() {
        return localeCode;
    }

    /**
     * Sets the localeCode property: the recommended action locale for the service health event.
     *
     * @param localeCode the localeCode value to set.
     * @return the EventPropertiesRecommendedActions object itself.
     */
    public EventPropertiesRecommendedActions withLocaleCode(String localeCode) {
        this.localeCode = localeCode;
        return this;
    }

    /**
     * Validates the instance by validating each contained action, if any.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (actions() != null) {
            for (EventPropertiesRecommendedActionsItem item : actions()) {
                item.validate();
            }
        }
    }
}
| 1,112 |
451 | <filename>applis/OLD-MICMAC-OLD/cAppliMICMAC_GPU.cpp
/*Header-MicMac-eLiSe-25/06/2007
MicMac : Multi Image Correspondances par Methodes Automatiques de Correlation
eLiSe : ELements of an Image Software Environnement
www.micmac.ign.fr
Copyright : Institut Geographique National
Author : <NAME>
Contributors : <NAME>, <NAME>.
[1] <NAME>, <NAME>.
"A multiresolution and optimization-based image matching approach:
An application to surface reconstruction from SPOT5-HRS stereo imagery."
In IAPRS vol XXXVI-1/W41 in ISPRS Workshop On Topographic Mapping From Space
(With Special Emphasis on Small Satellites), Ankara, Turquie, 02-2006.
[2] <NAME>, "MicMac, un lociel de mise en correspondance
d'images, adapte au contexte geograhique" to appears in
Bulletin d'information de l'Institut Geographique National, 2007.
Francais :
MicMac est un logiciel de mise en correspondance d'image adapte
au contexte de recherche en information geographique. Il s'appuie sur
la bibliotheque de manipulation d'image eLiSe. Il est distibue sous la
licences Cecill-B. Voir en bas de fichier et http://www.cecill.info.
English :
MicMac is an open source software specialized in image matching
for research in geographic information. MicMac is built on the
eLiSe image library. MicMac is governed by the "Cecill-B licence".
See below and http://www.cecill.info.
Header-MicMac-eLiSe-25/06/2007*/
#include "general/all.h"
#include "MICMAC.h"
#include "im_tpl/elise_ex_oper_assoc_exter.h"
/*
namespace std
{
bool operator < (const Pt3di & aP1,const Pt3di & aP2);
};
*/
namespace NS_ParamMICMAC
{
// Builds a row-pointer table for `anIm` re-based so that the image can be
// indexed with absolute terrain coordinates of `aBox` dilated by `aSzV`:
// each row pointer is shifted by -(p0.x - SzV.x), and the returned row-table
// pointer by -(p0.y - SzV.y). `aV` owns the pointer storage and must outlive
// the returned table.
template <class Type,class TBase>
Type ** ImDec
(
     std::vector<Type *> & aV,
     Im2D<Type,TBase> anIm,
     const Box2di & aBox,
     const Pt2di & aSzV
)
{
    aV.clear();
    Type ** aDIm = anIm.data();
    Pt2di aSzIm = anIm.sz();
    for (int aY = 0 ; aY<aSzIm.y ; aY++)
    {
       // Row origin moved so index aBox._p0.x - aSzV.x maps to column 0.
       aV.push_back(aDIm[aY] - aBox._p0.x+aSzV.x);
    }
    // Same re-basing for the row index.
    return &(aV[0]) - aBox._p0.y+aSzV.y;
    //return anIm.data();
}
// Per-image working state for GPU-style correlation over one terrain box:
// ortho-resampled values, running sums (for mean/variance and cross products)
// and validity masks, all indexable with absolute terrain coordinates thanks
// to ImDec. Image classes of equivalence are not supported (asserted below).
cGPU_LoadedImGeom::cGPU_LoadedImGeom
(
     const cAppliMICMAC & anAppli,
     cPriseDeVue* aPDV,
     int aNbVals,
     const Box2di & aBox,
     const Pt2di &aSzV
) :
    mAppli (anAppli),
    mPDV (aPDV),
    mLI (&aPDV->LoadedIm()),
    mGeom (&aPDV->Geom()),
    mSzV (aSzV),
    // Ortho buffers cover the box dilated by the correlation window.
    mSzOrtho (aBox.sz()+ mSzV*2),
    mImOrtho (mSzOrtho.x,mSzOrtho.y),
    mDImOrtho (mImOrtho),
    mDOrtho (ImDec(mVOrtho,mImOrtho,aBox,aSzV)),
    mImSomO (mSzOrtho.x,mSzOrtho.y),
    mDImSomO (mImSomO),
    mDSomO (ImDec(mVSomO,mImSomO,aBox,aSzV)),
    mImSomO2 (mSzOrtho.x,mSzOrtho.y),
    mDImSomO2 (mImSomO2),
    mDSomO2 (ImDec(mVSomO2,mImSomO2,aBox,aSzV)),
    mImSom12 (mSzOrtho.x,mSzOrtho.y),
    mDImSom12 (mImSom12),
    mDSom12 (ImDec(mVSom12,mImSom12,aBox,aSzV)),
    mImOK_Ortho (mSzOrtho.x,mSzOrtho.y),
    mDImOK_Ortho (mImOK_Ortho),
    mDOK_Ortho (ImDec(mVImOK_Ortho,mImOK_Ortho,aBox,aSzV)),
    // Number of samples in the fixed correlation window.
    mNbVals ((1+2*aSzV.x) * (1+2*aSzV.y)),
    mVals (aNbVals),
    mDataIm (mLI->DataFloatIm()),
    mSzX (mLI->SzIm().x),
    mSzY (mLI->SzIm().y),
    mImMasq (mLI->DataMasqIm()),
    mImPC (mLI->DataImPC()),
    mSeuilPC (mLI->SeuilPC()),
    mUsePC (mLI->UsePC())
{
    ELISE_ASSERT
    (
        aPDV->NumEquiv()==0,
        "Ne gere pas les classe d'equiv image en GPU"
    );
}
// Trivial accessors to the ortho-resampled buffers and their validity masks.
tGpuF ** cGPU_LoadedImGeom::DataOrtho() {return mDOrtho; }
U_INT1 ** cGPU_LoadedImGeom::DataOKOrtho() {return mDOK_Ortho; }
Im2D_U_INT1 cGPU_LoadedImGeom::ImOK_Ortho() {return mImOK_Ortho;}
tImGpu cGPU_LoadedImGeom::ImOrtho() {return mImOrtho; }
tImGpu cGPU_LoadedImGeom::ImSomO() {return mImSomO; }
tImGpu cGPU_LoadedImGeom::ImSomO2() {return mImSomO2; }
tImGpu cGPU_LoadedImGeom::ImSom12() {return mImSom12; }
// Prepares mMoy / mSigma (window mean and standard deviation, with the
// application's mean-dependent correction DeltaMoy) at terrain pixel
// (anX,anY). Returns false when the pixel is invalid or the variance is
// below AhEpsilon (flat area: correlation undefined).
bool cGPU_LoadedImGeom::InitValNorms(int anX,int anY)
{
    if (! mDOK_Ortho[anY][anX])
       return false;
    mMoy = mDSomO[anY][anX] / mNbVals;
    // double aDMoy = mEpsAddMoy + mMoy * mEpsMulMoy;
    double aDMoy = mAppli.DeltaMoy(mMoy);
    mSigma = mDSomO2[anY][anX] / mNbVals - QSquare(mMoy) + QSquare(aDMoy);
    mMoy += aDMoy;
    if (mSigma < mAppli.AhEpsilon())
       return false;
    mSigma = sqrt(mSigma);
    return true;
}
// Computes in aCorrel the normalized cross-correlation between this image
// and aGeoJ over the window centered at (anX,anY), using the precomputed
// sums (SomO, SomO2, Som12) and the DeltaMoy correction. Returns false when
// the pixel is invalid or either variance is below AhEpsilon.
bool cGPU_LoadedImGeom::Correl(double & aCorrel,int anX,int anY,const cGPU_LoadedImGeom & aGeoJ) const
{
    if (! mDOK_Ortho[anY][anX])
       return false;
    double aMI = mDSomO [anY][anX] /mNbVals;
    double aDmI = mAppli.DeltaMoy(aMI);
    double aMII = mDSomO2[anY][anX] /mNbVals - ElSquare(aMI) + ElSquare(aDmI);
    if (aMII < mAppli.AhEpsilon())
       return false;
    double aMJ = aGeoJ.mDSomO [anY][anX] /mNbVals;
    double aDmJ = mAppli.DeltaMoy(aMJ);
    double aMJJ = aGeoJ.mDSomO2[anY][anX] /mNbVals - ElSquare(aMJ) + ElSquare(aDmJ);
    if (aMJJ < mAppli.AhEpsilon())
       return false;
    // Cross moment; mDSom12 holds the product sums of image 0 with this image.
    double aMIJ = mDSom12[anY][anX] /mNbVals - aMI * aMJ + aDmI*aDmJ;
    aCorrel = aMIJ / sqrt(aMII*aMJJ);
    return true;
}
//
// Fonction de correlation preparant une version GPU. Pour l'instant on se
// reduit a la version qui fonctionne pixel par pixel (sans redressement global),
// de toute facon il faudra l'ecrire et elle est plus simple.
//
// Une fois les parametres d'environnement decode et traduits en donnees
// de bas niveau ( des tableau bi-dim de valeur numerique : entier, flottant et bits)
// les communications, dans le corps de la boucle, avec l'environnement MicMac sont reduites
// a trois appels :
//
// [1] Pt2dr aPIm = aGeom->CurObj2Im(aPTer,&aZReel);
//
// Appelle la fonction virtuelle de projection associee a chaque
// descripteur de geometrie de l'image.
//
// [2] mSurfOpt->SetCout(Pt2di(anX,anY),&aZInt,aDefCost);
//
// Appelle la fonction virtuelle de remplissage de cout
// de l'optimiseur actuellement utilise
//
//
// [3] double aVal = mInterpolTabule.GetVal(aDataIm,aPIm);
//
// Utilise l'interpolateur courant. Pour l'instant l'interpolateur
// est en dur quand on fonctionne en GPU
//
// Per-box initialization for the ad-hoc (GPU-style) correlation: caches the
// terrain box and its dilation by the correlation window, (re)allocates the
// validity masks, rebuilds the per-image cGPU_LoadedImGeom workspaces, scans
// the global Z range of the nappe, and sets up the projection-tabulation
// grids when the geometry is declared derivable.
void cAppliMICMAC::DoInitAdHoc(const Box2di & aBox,const Pt2di & aSzV)
{
    mX0Ter = aBox._p0.x;
    mX1Ter = aBox._p1.x;
    mY0Ter = aBox._p0.y;
    mY1Ter = aBox._p1.y;

    mCurSzV = aSzV;
    // Terrain box dilated by the window, so windows centered on the border fit.
    mDilX0Ter = mX0Ter - aSzV.x;
    mDilY0Ter = mY0Ter - aSzV.y;
    mDilX1Ter = mX1Ter + aSzV.x;
    mDilY1Ter = mY1Ter + aSzV.y;

    mCurSzDil = Pt2di(mDilX1Ter-mDilX0Ter, mDilY1Ter-mDilY0Ter);

    mImOkTerCur.Resize(mCurSzDil);
    mTImOkTerCur = TIm2D<U_INT1,INT> (mImOkTerCur);
    mDOkTer = ImDec(mVDOkTer,mImOkTerCur,aBox,aSzV);

    mImOkTerDil.Resize(mCurSzDil);
    mTImOkTerDil = TIm2D<U_INT1,INT> (mImOkTerDil);
    mDOkTerDil = ImDec(mVDOkTerDil,mImOkTerDil,aBox,aSzV);

    // All-ones mask, grown only when the current box is larger than before.
    Pt2di aSzAll1 = mAll1ImOkTerDil.sz();
    if ((aSzAll1.x < mCurSzDil.x ) || (aSzAll1.y<mCurSzDil.y))
    {
        mAll1ImOkTerDil = Im2D_U_INT1(mCurSzDil.x,mCurSzDil.y,1);
    }
    mAll1TImOkTerDil = TIm2D<U_INT1,INT>(mAll1ImOkTerDil);
    mAll1DOkTerDil = ImDec(mAll1VDOkTerDil,mAll1ImOkTerDil,aBox,aSzV);

    mTabZMin = mLTer->GPULowLevel_ZMin();
    mTabZMax = mLTer->GPULowLevel_ZMax();
    mTabMasqTER = mLTer->GPULowLevel_MasqTer();
    mAhDefCost = mStatGlob->CorrelToCout(mDefCorr);
    mAhEpsilon = EpsilonCorrelation().Val();
    mGeomDFPx.SetOriResolPlani(mOriPlani,mStepPlani);
    mOrigineZ = mGeomDFPx.OrigineAlti();
    mStepZ = mGeomDFPx.ResolutionAlti();
    mFirstZIsInit = false;

    // mVLI.clear();
    // Rebuild one workspace per active image for this box.
    DeleteAndClear(mVLI);
    for
    (
        tCsteIterPDV itFI=mPDVBoxGlobAct.begin();
        itFI!=mPDVBoxGlobAct.end();
        itFI++
    )
    {
        mVLI.push_back(new cGPU_LoadedImGeom(*this,*itFI,mNbPtsWFixe,aBox,mPtSzWFixe));
    }
    mNbIm = mVLI.size();

    // Global quantified-Z range of the nappe over the box.
    mZMinGlob = 1e7;
    mZMaxGlob = -1e7;
    for (int anX = mX0Ter ; anX < mX1Ter ; anX++)
    {
        for (int anY = mY0Ter ; anY < mY1Ter ; anY++)
        {
            ElSetMin(mZMinGlob,mTabZMin[anY][anX]);
            ElSetMax(mZMaxGlob,mTabZMax[anY][anX]);
        }
    }

    // When geometry is derivable, projections are tabulated on a coarse grid
    // of step mGpuSzD (0 disables tabulation).
    mGpuSzD = 0;
    if (mCurEtape->UseGeomDerivable())
    {
        mGpuSzD = mCurEtape->SzGeomDerivable();
        Pt2di aSzOrtho = aBox.sz() + aSzV * 2;
        Pt2di aSzTab = Pt2di(3,3) + aSzOrtho/mGpuSzD;
        mGeoX.Resize(aSzTab);
        mGeoY.Resize(aSzTab);
        mTGeoX = TIm2D<REAL4,REAL8>(mGeoX);
        mTGeoY = TIm2D<REAL4,REAL8>(mGeoY);
    }
    // std::cout << mGpuSzD << "\n";
}
double MAXDIST = 0.0;
// Prepares one quantified Z layer: marks the terrain pixels whose nappe
// contains aZ, shrinks the useful box accordingly, resamples every image
// at this Z (ortho buffers) and accumulates the window sums needed by the
// chosen moment mode. Returns false when no terrain pixel is active.
bool cAppliMICMAC::InitZ(int aZ,eModeInitZ aMode)
{
    mZIntCur =aZ;
    mZTerCur = DequantZ(mZIntCur);

    mImOkTerCur.raz();

    // Useful bounding box, initialized empty and grown over active pixels.
    mX0UtiTer = mX1Ter + 1;
    mY0UtiTer = mY1Ter + 1;
    mX1UtiTer = mX0Ter;
    mY1UtiTer = mY0Ter;

    for (int anX = mX0Ter ; anX < mX1Ter ; anX++)
    {
        for (int anY = mY0Ter ; anY < mY1Ter ; anY++)
        {
            // Active iff aZ lies inside the [ZMin,ZMax[ nappe and the pixel is in the terrain mask.
            mDOkTer[anY][anX] =
                   (mZIntCur >= mTabZMin[anY][anX])
                && (mZIntCur < mTabZMax[anY][anX])
                && IsInTer(anX,anY)
            ;
            if ( mDOkTer[anY][anX])
            {
                ElSetMin(mX0UtiTer,anX);
                ElSetMax(mX1UtiTer,anX);
                ElSetMin(mY0UtiTer,anY);
                ElSetMax(mY1UtiTer,anY);
            }
        }
    }
    mX1UtiTer ++;
    mY1UtiTer ++;

    if (mX0UtiTer >= mX1UtiTer)
       return false;

    int aKFirstIm = 0;
    U_INT1 ** aDOkIm0TerDil = mDOkTerDil;
    // When image 1 is in the paralax mode, image 0's ortho only needs to be
    // (re)computed once, over the full box with an all-ones mask.
    if (mGIm1IsInPax)
    {
        if (mFirstZIsInit)
        {
            aKFirstIm = 1;
        }
        else
        {
            mX0UtiTer = mX0Ter;
            mX1UtiTer = mX1Ter;
            mY0UtiTer = mY0Ter;
            mY1UtiTer = mY1Ter;
            aDOkIm0TerDil = mAll1DOkTerDil;
        }
    }

    // Useful box dilated by the window, in terrain and in local coordinates.
    mX0UtiDilTer = mX0UtiTer - mCurSzV.x;
    mY0UtiDilTer = mY0UtiTer - mCurSzV.y;
    mX1UtiDilTer = mX1UtiTer + mCurSzV.x;
    mY1UtiDilTer = mY1UtiTer + mCurSzV.y;

    mX0UtiLocIm = mX0UtiTer - mDilX0Ter;
    mX1UtiLocIm = mX1UtiTer - mDilX0Ter;
    mY0UtiLocIm = mY0UtiTer - mDilY0Ter;
    mY1UtiLocIm = mY1UtiTer - mDilY0Ter;

    mX0UtiDilLocIm = mX0UtiDilTer - mDilX0Ter;
    mX1UtiDilLocIm = mX1UtiDilTer - mDilX0Ter;
    mY0UtiDilLocIm = mY0UtiDilTer - mDilY0Ter;
    mY1UtiDilLocIm = mY1UtiDilTer - mDilY0Ter;

    Box2di aBoxUtiLocIm(Pt2di(mX0UtiLocIm,mY0UtiLocIm),Pt2di(mX1UtiLocIm,mY1UtiLocIm));
    Box2di aBoxUtiDilLocIm(Pt2di(mX0UtiDilLocIm,mY0UtiDilLocIm),Pt2di(mX1UtiDilLocIm,mY1UtiDilLocIm));

    Dilate(mImOkTerCur,mImOkTerDil,mCurSzV,aBoxUtiDilLocIm);

    cInterpolateurIm2D<float> * anInt = CurEtape()->InterpFloat();

    cGPU_LoadedImGeom * aGLI_00 = mNbIm ? mVLI[0] : 0 ;
    if (aMode==eModeMom_12_2_22)
    {
        ELISE_ASSERT(aGLI_00!=0,"Incohe eModeMom_12_2_22 with no Im in cAppliMICMAC::InitZ");
    }

    for (int aKIm= aKFirstIm ; aKIm<mNbIm ; aKIm++)
    {
        cGPU_LoadedImGeom & aGLI = *(mVLI[aKIm]);
        const cGeomImage * aGeom=aGLI.Geom();
        float ** aDataIm = aGLI.DataIm();
        tGpuF ** aDOrtho = aGLI.DataOrtho();
        U_INT1 ** aOkOr = aGLI.DataOKOrtho();

        // Tabulate image projections on a grid of step mGpuSzD.
        if (mGpuSzD)
        {
            int aNbX = (mX1UtiDilTer-mX0UtiDilTer +mGpuSzD) / mGpuSzD;
            int aNbY = (mY1UtiDilTer-mY0UtiDilTer +mGpuSzD) / mGpuSzD;
            for (int aKX = 0; aKX <= aNbX ; aKX++)
            {
                for (int aKY = 0; aKY <= aNbY ; aKY++)
                {
                    Pt2dr aPTer = DequantPlani(mX0UtiDilTer+aKX*mGpuSzD,mY0UtiDilTer+aKY*mGpuSzD);
                    Pt2dr aPIm = aGeom->CurObj2Im(aPTer,&mZTerCur);
                    Pt2di anI(aKX,aKY);
                    mTGeoX.oset(anI,aPIm.x);
                    mTGeoY.oset(anI,aPIm.y);
                }
            }
        }

        U_INT1 ** aDLocOkTerDil = (aKIm==0) ? aDOkIm0TerDil : mDOkTerDil;

        double aStep = 1.0/ElMax(1,mGpuSzD); // guard against dividing by 0
        double anIndX = 0.0;
        for (int anX = mX0UtiDilTer ; anX < mX1UtiDilTer ; anX++)
        {
            double anIndY = 0.0;
            for (int anY = mY0UtiDilTer ; anY < mY1UtiDilTer ; anY++)
            {
                aOkOr[anY][anX] = 0;
                aDOrtho[anY][anX] = 0.0;
                if (aDLocOkTerDil[anY][anX])
                {
                    Pt2dr aPIm;
                    // Either read the projection from the tabulated grid
                    // (fractional index), or project exactly.
                    if (mGpuSzD)
                    {
                        Pt2dr anInd(anIndX,anIndY);
                        aPIm = Pt2dr( mTGeoX.getr(anInd), mTGeoY.getr(anInd)) ;
                    }
                    else
                    {
                        Pt2dr aPTer = DequantPlani(anX,anY);
                        aPIm = aGeom->CurObj2Im(aPTer,&mZTerCur);
                    }
                    if (aGLI.IsOk(aPIm.x,aPIm.y))
                    {
                        aDOrtho[anY][anX] = anInt->GetVal(aDataIm,aPIm);
                        aOkOr[anY][anX] = 1;
                    }
                }
                anIndY += aStep;
            }
            anIndX += aStep;
        }

        // A center is valid only if its whole window is valid.
        SelfErode(aGLI.ImOK_Ortho(), mCurSzV,aBoxUtiLocIm);
        if (    (aMode==eModeMom_2_22)
             || ((aKIm==0) && (aMode==eModeMom_12_2_22))
           )
        {
            MomOrdre2(aGLI.ImOrtho(),aGLI.ImSomO(),aGLI.ImSomO2(),mCurSzV,aBoxUtiLocIm);
        }
        else if (aMode==eModeMom_12_2_22)
        {
            // std::cout << "KIM " << aKIm << "\n";
            // Also accumulate cross products with image 0 (master-image modes).
            Mom12_22
            (
                aGLI_00->ImOrtho(),
                aGLI.ImOrtho(),
                aGLI.ImSom12(),
                aGLI.ImSomO(),
                aGLI.ImSomO2(),
                mCurSzV,
                aBoxUtiLocIm
            );
        }
    }
    mFirstZIsInit = true;
    return true;
}
// Symmetric multi-image correlation at terrain pixel (anX,anY) for the
// current Z: accumulates, over the window, the variance of the normalized
// values across all valid images, converts it to a cost and hands it to the
// optimizer. Requires at least two valid images, else the default cost.
void cAppliMICMAC::DoOneCorrelSym(int anX,int anY)
{
    double aCost = mAhDefCost;
    // Images whose normalization (mean/sigma) is valid at this pixel.
    std::vector<cGPU_LoadedImGeom *> aCurVLI;
    for (int aKIm=0 ; aKIm<mNbIm ; aKIm++)
    {
        cGPU_LoadedImGeom * aGLI = (mVLI[aKIm]);
        if (aGLI->InitValNorms(anX,anY))
        {
            aCurVLI.push_back(aGLI);
        }
    }
    int aNbImCur = aCurVLI.size();

    if (aNbImCur >= 2)
    {
        int aX0 = anX - mCurSzV.x;
        int aX1 = anX + mCurSzV.x;
        // NOTE(review): the Y bounds use mCurSzV.x, not .y — presumably only
        // correct for square windows; confirm whether this is intentional.
        int aY0 = anY - mCurSzV.x;
        int aY1 = anY + mCurSzV.x;
        double anEC2 = 0;
        for (int aXV=aX0 ; aXV<=aX1 ; aXV++)
        {
            for (int aYV=aY0 ; aYV<=aY1 ; aYV++)
            {
                double aSV = 0;
                double aSVV = 0;
                for (int aKIm=0 ; aKIm<aNbImCur ; aKIm++)
                {
                    double aV = aCurVLI[aKIm]->ValNorm(aXV,aYV);
                    aSV += aV;
                    aSVV += QSquare(aV) ;
                }
                // Per-sample dispersion across images.
                anEC2 += (aSVV-QSquare(aSV)/aNbImCur);
            }
        }
        aCost = anEC2 / ((aNbImCur -1) * mNbPtsWFixe);
        aCost = mStatGlob->CorrelToCout(1-aCost);
    }
    mSurfOpt->SetCout(Pt2di(anX,anY),&mZIntCur,aCost);
}
/*
   Normalized gap between two intensities. Writing X = aI1/aI2, returns
   X - 1 when X < 1 and 1 - 1/X otherwise: an antisymmetric value that is
   0 iff the two intensities are equal.
*/
double EcartNormalise(double aI1,double aI2)
{
    return (aI1 < aI2) ? (aI1 / aI2 - 1.0) : (1.0 - aI2 / aI1);
}
// Master-image correlation at (anX,anY) for the current Z: averages the
// pairwise correlations of image 0 against every other valid image. When a
// punctual multi-correlation (aCMP) is requested, also emits per-image
// normalized intensity gaps (quantized to INT1) to the optimizer.
void cAppliMICMAC::DoOneCorrelIm1Maitre(int anX,int anY,const cMultiCorrelPonctuel * aCMP)
{
    int aNbOk = 0;
    double aSomCorrel = 0;

    // The master image (index 0) must itself be valid at this pixel.
    if (mVLI[0]->OkOrtho(anX,anY))
    {
        for (int aKIm=1 ; aKIm<mNbIm ; aKIm++)
        {
            double aCor;
            if (mVLI[aKIm]->Correl(aCor,anX,anY,*(mVLI[0])))
            {
                aNbOk ++;
                aSomCorrel += aCor;
            }
        }
    }

    if (aCMP)
    {
        std::vector<INT1> aVNorm;
        if (mVLI[0]->OkOrtho(anX,anY))
        {
            tGpuF aV0 = mVLI[0]->ImOrtho(anX,anY);
            for (int aK=1 ; aK<mNbIm ; aK++)
            {
                 if (mVLI[aK]->OkOrtho(anX,anY))
                 {
                     // Gap in ]-1,1[ scaled to the INT1 range.
                     double aVal = EcartNormalise(aV0,mVLI[aK]->ImOrtho(anX,anY));
                     aVNorm.push_back(AdaptCostPonct(round_ni(aVal*127)));
                 }
                 else
                 {
                     aVNorm.push_back(ValUndefCPONT);
                 }
            }
        }
        else
        {
            // Master invalid: every pair is undefined.
            for (int aK=1 ; aK<mNbIm ; aK++)
            {
                aVNorm.push_back(ValUndefCPONT);
            }
        }
        mSurfOpt->Local_VecInt1(Pt2di(anX,anY),&mZIntCur,aVNorm);
    }

    mSurfOpt->SetCout
    (
        Pt2di(anX,anY),
        &mZIntCur,
        aNbOk ? mStatGlob->CorrelToCout(aSomCorrel/aNbOk) : mAhDefCost
    );
}
// Master-image correlation keeping, per Z, either one cost per secondary
// image (best-image labelling mode, mEBI set) or only the best pairwise
// correlation against image 0.
void cAppliMICMAC::DoOneCorrelMaxIm1Maitre(int anX,int anY)
{
    if (mEBI) // best-image labelling: one cost per (Z, secondary image) pair
    {
        if (mNbIm>1)
        {
            for (int aKIm=1 ; aKIm<mNbIm ; aKIm++)
            {
                double aCor;
                bool Ok = mVLI[aKIm]->Correl(aCor,anX,anY,*(mVLI[0]));
                aCor = Ok ? mStatGlob->CorrelToCout(aCor) : mAhDefCost;
                mSurfOpt->SetCout ( Pt2di(anX,anY),&mZIntCur,aCor, aKIm-1);
            }
        }
        else
        {
            mSurfOpt->SetCout(Pt2di(anX,anY),&mZIntCur,mAhDefCost,0);
        }
    }
    else
    {
        // -2 is below any real correlation; > -1 below means "at least one pair succeeded".
        double aMaxCorrel = -2;
        if (mVLI[0]->OkOrtho(anX,anY))
        {
            for (int aKIm=1 ; aKIm<mNbIm ; aKIm++)
            {
                double aCor;
                if (mVLI[aKIm]->Correl(aCor,anX,anY,*(mVLI[0])))
                {
                    ElSetMax(aMaxCorrel,aCor);
                }
            }
        }
        mSurfOpt->SetCout
        (
            Pt2di(anX,anY),
            &mZIntCur,
            (aMaxCorrel>-1) ? mStatGlob->CorrelToCout(aMaxCorrel) : mAhDefCost
        );
    }
}
// Drives the ad-hoc correlation over the whole Z range of the box: picks the
// moment mode from the aggregation mode, then for each Z layer initialized by
// InitZ, dispatches every active terrain pixel to the matching per-pixel
// correlator.
void cAppliMICMAC::DoGPU_Correl
(
    const Box2di & aBox,
    const cMultiCorrelPonctuel * aMCP
)
{
    eModeInitZ aModeInitZ = eModeMom_2_22;
    eModeAggregCorr aModeAgr = mCurEtape->EtapeMEC().AggregCorr().Val();

    if (aMCP)
    {
        ELISE_ASSERT(aModeAgr==eAggregIm1Maitre,"MultiCorrelPonctuel requires eAggregIm1Maitre");
    }

    if (aModeAgr==eAggregSymetrique)
    {
        // Symmetric mode: the default per-image moments are enough.
    }
    //else if ((aModeAgr==eAggregIm1Maitre) || (aModeAgr==eAggregMaxIm1Maitre))
    else if (IsModeIm1Maitre(aModeAgr))
    {
        // Master-image modes also need the cross moments with image 0.
        aModeInitZ = eModeMom_12_2_22;
    }
    else
    {
        ELISE_ASSERT(false,"Unsupported Mode Aggreg in cAppliMICMAC::DoGPU_Correl");
    }

    for (int aZ=mZMinGlob ; aZ<mZMaxGlob ; aZ++)
    {
        // InitZ returns false when no terrain pixel is active at this Z.
        if (InitZ(aZ,aModeInitZ))
        {
            for (int anX = mX0UtiTer ; anX < mX1UtiTer ; anX++)
            {
                for (int anY = mY0UtiTer ; anY < mY1UtiTer ; anY++)
                {
                    if (mDOkTer[anY][anX])
                    {
                        switch (aModeAgr)
                        {
                            case eAggregSymetrique :
                                 DoOneCorrelSym(anX,anY);
                            break;

                            case eAggregIm1Maitre :
                                 DoOneCorrelIm1Maitre(anX,anY,aMCP);
                            break;

                            case eAggregMaxIm1Maitre :
                                 DoOneCorrelMaxIm1Maitre(anX,anY);
                            break;

                            default :
                            break;
                        }
                    }
                }
            }
        }
    }
}
// Basic ("GPU style") symmetric multi-image correlation over the terrain box.
// For every terrain cell inside the validity mask and every quantized Z of
// its [ZMin,ZMax) interval it:
//   1- projects the correlation window into each image,
//   2- normalizes each fully-readable patch to zero mean / unit variance,
//   3- computes the multi-image cost with the Huygens variance formula
//      (MicMac documentation, section 3.5) and hands it to the optimizer.
// Cells/Z seen by fewer than 2 images receive the default cost mAhDefCost.
void cAppliMICMAC::DoGPU_Correl_Basik
     (
          const Box2di & aBox
     )
{
    // The MicMac environment (envelopes, images, quantization, masks...) is
    // assumed to have been loaded beforehand (DoInitAdHoc).

    // Terrain mask of valid points
    // U_INT1 ** aTabMasqTER = mLTer->GPULowLevel_MasqTer();

    // Two constants: the cost charged when correlation cannot be computed,
    // and the minimal standard deviation
    // double aAhDefCost = mStatGlob->CorrelToCout(mDefCorr);
    // double anAhEpsilon = EpsilonCorrelation().Val();

    // Buffer of pointers to all the valid (OK) normalized patches
    std::vector<double *> aVecVals(mNbIm);
    double ** aVVals = &(aVecVals[0]);

    // Sweep the terrain
    for (int anX = mX0Ter ; anX < mX1Ter ; anX++)
    {
        for (int anY = mY0Ter ; anY < mY1Ter ; anY++)
        {
            int aZMin = mTabZMin[anY][anX];
            int aZMax = mTabZMax[anY][anX];

            // Are we inside the mask of valid terrain points?
            if ( IsInTer(anX,anY))
            {
                // Bounds of the correlation-window neighbourhood
                int aX0v = anX-mPtSzWFixe.x;
                int aX1v = anX+mPtSzWFixe.x;
                int aY0v = anY-mPtSzWFixe.y;
                int aY1v = anY+mPtSzWFixe.y;

                // Scan the Z interval of the envelope at the current point
                for (int aZInt=aZMin ; aZInt< aZMax ; aZInt++)
                {
                    // Will end up pointing one past the last valid patch
                    double ** aVVCur = aVVals;
                    // MicMac statistics
                    mNbPointsIsole++;

                    // De-quantize Z
                    double aZReel = DequantZ(aZInt); // anOrigineZ+ aZInt*aStepZ;

                    int aNbImOk = 0;

                    // Sweep the images: for each one, read the window values
                    // and store, per image, a vector of neighbour values
                    // normalized in mean and standard deviation
                    for (int aKIm=0 ; aKIm<mNbIm ; aKIm++)
                    {
                        cGPU_LoadedImGeom & aGLI = *(mVLI[aKIm]);
                        const cGeomImage * aGeom=aGLI.Geom();
                        float ** aDataIm = aGLI.DataIm();

                        // Stack of raw window values for this image
                        double * mValsIm = aGLI.Vals();
                        double * mCurVals = mValsIm;

                        // Accumulators for the order-1 and order-2 moments
                        double aSV = 0;
                        double aSVV = 0;

                        // When hidden-part handling is on, a per-image
                        // terrain visibility mask has been computed
                        if (aGLI.IsVisible(anX,anY))
                        {
                            // Stays true while every neighbour of this
                            // pixel/image pair is readable
                            bool IsOk = true;

                            // Sweep the neighbourhood
                            for (int aXVois=aX0v ; (aXVois<=aX1v)&&IsOk; aXVois++)
                            {
                                for (int aYVois= aY0v; (aYVois<=aY1v)&&IsOk; aYVois++)
                                {
                                    // De-quantize planimetry
                                    Pt2dr aPTer = DequantPlani(aXVois,aYVois);
                                    // Project into the image
                                    Pt2dr aPIm = aGeom->CurObj2Im(aPTer,&aZReel);
                                    /*
                                    if (MPD_MM())
                                    {
                                        static int aCpt; aCpt++;
                                        std::cout << "Cpt " << aCpt << " " << aPIm << aPTer << " OK " << aGLI.IsOk(aPIm.x,aPIm.y) << "\n";
                                        getchar();
                                    }
                                    */

                                    if (aGLI.IsOk(aPIm.x,aPIm.y))
                                    {
                                        // Read the image value through the tabulated interpolator
                                        double aVal = mInterpolTabule.GetVal(aDataIm,aPIm);
                                        // Push the new image value
                                        *(mCurVals++) = aVal;
                                        aSV += aVal;
                                        aSVV += QSquare(aVal) ;
                                        // mValsIm.push_back(mInterpolTabule.GetVal(aDataIm,aPIm));
                                        // *(mTopPts++) = aPIm;
                                    }
                                    else
                                    {
                                        // A single unreadable neighbour cancels the whole patch
                                        IsOk =false;
                                    }
                                }
                            }
                            if (IsOk)
                            {
                                // Normalize in mean and standard deviation
                                aSV /= mNbPtsWFixe;
                                aSVV /= mNbPtsWFixe;
                                aSVV -= QSquare(aSV) ;
                                if (aSVV >mAhEpsilon) // guard against /0 and sqrt(<0)
                                {
                                    *(aVVCur++) = mValsIm;
                                    aSVV = sqrt(aSVV);
                                    for (int aKV=0 ; aKV<mNbPtsWFixe; aKV++)
                                        mValsIm[aKV] = (mValsIm[aKV]-aSV)/aSVV;
                                }
                                else
                                {
                                    IsOk = false;
                                }
                            }
                            aNbImOk += IsOk;
                            aGLI.SetOK(IsOk);
                        }
                        else
                        {
                            aGLI.SetOK(false);
                        }
                    }

                    // "Fast" multi-correlation using the Huygens formula, as
                    // described in section 3.5 of the MicMac documentation
                    if (aNbImOk>=2)
                    {
                        double anEC2 = 0;
                        // For each pixel of the window
                        for (int aKV=0 ; aKV<mNbPtsWFixe; aKV++)
                        {
                            double aSV=0,aSVV=0;
                            // For each image, update order-1 and order-2 stats
                            for (int aKIm=0 ; aKIm<aNbImOk ; aKIm++)
                            {
                                double aV = aVVals[aKIm][aKV];
                                aSV += aV;
                                aSVV += QSquare(aV);
                            }
                            // Accumulate the inter-patch variance
                            anEC2 += (aSVV-QSquare(aSV)/aNbImOk);
                        }
                        // Normalize back to a 1-correlation equivalent
                        double aCost = anEC2 / (( aNbImOk-1) *mNbPtsWFixe);
                        aCost = mStatGlob->CorrelToCout(1-aCost);

                        // Hand the cost over to the optimizer
                        mSurfOpt->SetCout(Pt2di(anX,anY),&aZInt,aCost);
                        // if (Debug) std::cout << "Z " << aZInt << " Cost " << aCost << "\n";
                    }
                    else
                    {
                        // if (Debug) std::cout << "Z " << aZInt << " DEF " << aDefCost << "\n";
                        // Not enough images: the cell must still be filled with something
                        mSurfOpt->SetCout(Pt2di(anX,anY),&aZInt,mAhDefCost);
                    }
                }
            }
            else
            {
                // Outside the terrain mask: fill the whole Z interval with
                // the default cost so the optimizer has a value everywhere.
                for (int aZInt=aZMin ; aZInt< aZMax ; aZInt++)
                {
                    mSurfOpt->SetCout(Pt2di(anX,anY),&aZInt,mAhDefCost);
                }
            }
        }
    }
}
// Dispatches the ad-hoc correlation of box aBox to the algorithm selected in
// the <TypeCAH> configuration: GPU correlation (with or without punctual
// multi-correlation), basic GPU correlation, punctual 2-image, crossed
// punctual, multi-window, predicted-Z MNE, or robust non-centered.
// When "best image" labelling (mEBI) is requested, the aggregation mode must
// be eAggregMaxIm1Maitre.
void cAppliMICMAC::DoCorrelAdHoc
     (
          const Box2di & aBox
     )
{
    if (mEBI)
    {
        ELISE_ASSERT
        (
            mCurEtape->EtapeMEC().AggregCorr().Val() == eAggregMaxIm1Maitre,
            "EtiqBestImage requires eAggregMaxIm1Maitre,"
        );
        /// ELISE_ASSERT(mNb_PDVBoxInterne>,);
    }

    // Window size; kept separate so an algorithm imposing its own size
    // could eventually override it.
    Pt2di aSzV = mPtSzWFixe;

    DoInitAdHoc(aBox,aSzV);

    const cTypeCAH & aTC = mCorrelAdHoc->TypeCAH();

    if (aTC.GPU_Correl().IsInit())
    {
        // GPU correlation without punctual multi-correlation.
        DoGPU_Correl(aBox,(cMultiCorrelPonctuel*)0);
    }
    else if (aTC.GPU_CorrelBasik().IsInit())
    {
        DoGPU_Correl_Basik(aBox);
    }
    else if (aTC.Correl_Ponctuel2ImGeomI().IsInit())
    {
        DoCorrelPonctuelle2ImGeomI(aBox,aTC.Correl_Ponctuel2ImGeomI().Val());
    }
    else if (aTC.Correl_PonctuelleCroisee().IsInit())
    {
        DoCorrelCroisee2ImGeomI(aBox,aTC.Correl_PonctuelleCroisee().Val());
    }
    else if (aTC.Correl_MultiFen().IsInit())
    {
        DoCorrelMultiFen(aBox,aTC.Correl_MultiFen().Val());
    }
    else if (aTC.Correl_Correl_MNE_ZPredic().IsInit())
    {
        Correl_MNE_ZPredic(aBox,aTC.Correl_Correl_MNE_ZPredic().Val());
    }
    else if (aTC.Correl_NC_Robuste().IsInit())
    {
        DoCorrelRobusteNonCentree(aBox,aTC.Correl_NC_Robuste().Val());
    }
    else if (aTC.MultiCorrelPonctuel().IsInit())
    {
        // GPU correlation with punctual multi-correlation enabled.
        DoGPU_Correl(aBox,(aTC.MultiCorrelPonctuel().PtrVal()));
    }
}
// Top-level entry point for ad-hoc correlation: reads the averaging epsilons
// from the configuration, tiles aBox into sub-blocks of the configured size
// and runs DoCorrelAdHoc on each sub-block.
void cAppliMICMAC::GlobDoCorrelAdHoc
     (
          const Box2di & aBox
     )
{
    mEpsAddMoy = mCorrelAdHoc->EpsilonAddMoyenne().Val();
    mEpsMulMoy = mCorrelAdHoc->EpsilonMulMoyenne().Val();

    // Tile the box; the trailing 0 means no overlap between sub-intervals.
    cDecoupageInterv2D aDecInterv =
        cDecoupageInterv2D::SimpleDec
        (
            aBox.sz(),
            mCorrelAdHoc->SzBlocAH().Val(),
            0
        );

    for (int aKBox=0 ; aKBox<aDecInterv.NbInterv() ; aKBox++)
    {
        DoCorrelAdHoc(aDecInterv.KthIntervOut(aKBox));
    }
}
};
/*Footer-MicMac-eLiSe-25/06/2007
Ce logiciel est un programme informatique servant à la mise en
correspondances d'images pour la reconstruction du relief.
Ce logiciel est régi par la licence CeCILL-B soumise au droit français et
respectant les principes de diffusion des logiciels libres. Vous pouvez
utiliser, modifier et/ou redistribuer ce programme sous les conditions
de la licence CeCILL-B telle que diffusée par le CEA, le CNRS et l'INRIA
sur le site "http://www.cecill.info".
En contrepartie de l'accessibilité au code source et des droits de copie,
de modification et de redistribution accordés par cette licence, il n'est
offert aux utilisateurs qu'une garantie limitée. Pour les mêmes raisons,
seule une responsabilité restreinte pèse sur l'auteur du programme, le
titulaire des droits patrimoniaux et les concédants successifs.
A cet égard l'attention de l'utilisateur est attirée sur les risques
associés au chargement, à l'utilisation, à la modification et/ou au
développement et à la reproduction du logiciel par l'utilisateur étant
donné sa spécificité de logiciel libre, qui peut le rendre complexe à
manipuler et qui le réserve donc à des développeurs et des professionnels
avertis possédant des connaissances informatiques approfondies. Les
utilisateurs sont donc invités à charger et tester l'adéquation du
logiciel à leurs besoins dans des conditions permettant d'assurer la
sécurité de leurs systèmes et ou de leurs données et, plus généralement,
à l'utiliser et l'exploiter dans les mêmes conditions de sécurité.
Le fait que vous puissiez accéder à cet en-tête signifie que vous avez
pris connaissance de la licence CeCILL-B, et que vous en avez accepté les
termes.
Footer-MicMac-eLiSe-25/06/2007*/
| 19,088 |
12,252 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.saml.processing.core.parsers.util;
import org.keycloak.dom.saml.v2.assertion.NameIDType;
import org.keycloak.saml.common.PicketLinkLogger;
import org.keycloak.saml.common.PicketLinkLoggerFactory;
import org.keycloak.saml.common.exceptions.ParsingException;
import org.keycloak.saml.common.util.StaxParserUtil;
import org.keycloak.saml.processing.core.parsers.saml.assertion.SAMLAssertionQNames;
import java.util.Objects;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.events.StartElement;
/**
* Utility methods for SAML Parser
*
* @author <EMAIL>
* @since Nov 4, 2010
*/
public class SAMLParserUtil {
private static final PicketLinkLogger LOGGER = PicketLinkLoggerFactory.getLogger();
/**
* Parse a {@code NameIDType}
*
* @param xmlEventReader
*
* @return
*
* @throws ParsingException
*/
public static NameIDType parseNameIDType(XMLEventReader xmlEventReader) throws ParsingException {
StartElement nameIDElement = StaxParserUtil.getNextStartElement(xmlEventReader);
NameIDType nameID = new NameIDType();
nameID.setFormat(StaxParserUtil.getUriAttributeValue(nameIDElement, SAMLAssertionQNames.ATTR_FORMAT));
nameID.setNameQualifier(StaxParserUtil.getAttributeValue(nameIDElement, SAMLAssertionQNames.ATTR_NAME_QUALIFIER));
nameID.setSPProvidedID(StaxParserUtil.getAttributeValue(nameIDElement, SAMLAssertionQNames.ATTR_SP_PROVIDED_ID));
nameID.setSPNameQualifier(StaxParserUtil.getAttributeValue(nameIDElement, SAMLAssertionQNames.ATTR_SP_NAME_QUALIFIER));
String nameIDValue = StaxParserUtil.getElementText(xmlEventReader);
nameID.setValue(nameIDValue);
return nameID;
}
public static void validateAttributeValue(StartElement element, HasQName attributeName, String expectedValue) throws ParsingException {
String value = StaxParserUtil.getRequiredAttributeValue(element, attributeName);
if (! Objects.equals(expectedValue, value)) {
throw LOGGER.parserException(new RuntimeException(
String.format("%s %s required to be \"%s\"", element.getName(), attributeName.getQName(), expectedValue)));
}
}
} | 1,008 |
2,542 | // ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
#include "Store.h"
using namespace ktl;
using namespace Data;
using namespace Data::TStore;
using namespace Data::Utilities;
// Factory entry point: creates a concrete TStore keyed by KString with
// KBuffer values and returns it through the IStore interface.
//
//   traceId         - partition/replica identity used for tracing
//   keyComparer     - ordering of the KString keys
//   func            - hash function applied to the keys
//   allocator       - KTL allocator used by the store
//   name            - state-provider name (URI)
//   stateProviderId - numeric state-provider id
//   keySerializer   - serializer for keys
//   valueSerializer - serializer for values
//   result          - receives the created store on success
//
// Returns the NTSTATUS of the underlying Store::Create call;
// Diagnostics::Validate traps unexpected failures before the cast.
NTSTATUS Factory::Create(
    __in Data::Utilities::PartitionedReplicaId const & traceId,
    __in IComparer<KString::SPtr>& keyComparer,
    __in KDelegate<ULONG(const KString::SPtr&Key)> func,
    __in KAllocator & allocator,
    __in KUriView & name,
    __in FABRIC_STATE_PROVIDER_ID stateProviderId,
    __in Data::StateManager::IStateSerializer<KString::SPtr>& keySerializer,
    __in Data::StateManager::IStateSerializer<KBuffer::SPtr>& valueSerializer,
    __out IStore<KString::SPtr, KBuffer::SPtr>::SPtr& result)
{
    Store<KString::SPtr, KBuffer::SPtr>::SPtr storeSPtr = nullptr;
    NTSTATUS status = Store<KString::SPtr, KBuffer::SPtr>::Create(traceId, keyComparer, func, allocator, name, stateProviderId, keySerializer, valueSerializer, storeSPtr);
    Diagnostics::Validate(status);
    // Hand the concrete store back to the caller as the interface type.
    result = IStore<KString::SPtr, KBuffer::SPtr>::SPtr(storeSPtr.RawPtr());
    return status;
}
| 438 |
351 | ################################################################################
# _ ____ ___ #
# / \ / ___|_ _| #
# / _ \| | | | #
# / ___ \ |___ | | #
# ____ _/_/ _\_\____|___| _ #
# / ___|__ _| |__ | | ___ | _ \| | __ _ _ __ #
# | | / _` | '_ \| |/ _ \ | |_) | |/ _` | '_ \ #
# | |__| (_| | |_) | | __/ | __/| | (_| | | | | #
# \____\__,_|_.__/|_|\___| |_| |_|\__,_|_| |_| #
# #
################################################################################
# #
# Copyright (c) 2015 Cisco Systems #
# All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT #
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #
# License for the specific language governing permissions and limitations #
# under the License. #
# #
################################################################################
import sys
import re
import acitoolkit as ACI
# ElementTree implementation chosen by the import cascade below (module ref).
eTree = None
# Set to True to print which XML library was picked at import time.
Verbose_import_ = False
# Identifiers for the possible XML parser libraries.
(
    XMLParser_import_none, XMLParser_import_lxml,
    XMLParser_import_elementtree
) = range(3)
# Which of the libraries above was actually imported (one of the ids above).
XMLParser_import_library = None
# Import the best available ElementTree implementation, in order of
# preference: lxml, cElementTree (2.5+), xml.etree.ElementTree (2.5+),
# stand-alone cElementTree, stand-alone elementtree.
try:
    # lxml
    from lxml import etree as eTree

    XMLParser_import_library = XMLParser_import_lxml
    if Verbose_import_:
        print("running with lxml.etree")
except ImportError:
    try:
        # cElementTree from Python 2.5+
        import xml.etree.cElementTree as eTree

        XMLParser_import_library = XMLParser_import_elementtree
        if Verbose_import_:
            print("running with cElementTree on Python 2.5+")
    except ImportError:
        try:
            # ElementTree from Python 2.5+
            import xml.etree.ElementTree as eTree

            XMLParser_import_library = XMLParser_import_elementtree
            if Verbose_import_:
                print("running with ElementTree on Python 2.5+")
        except ImportError:
            try:
                # normal cElementTree install
                import cElementTree as eTree

                XMLParser_import_library = XMLParser_import_elementtree
                if Verbose_import_:
                    print("running with cElementTree")
            except ImportError:
                try:
                    # normal ElementTree install
                    # noinspection PyUnresolvedReferences
                    import elementtree.ElementTree as eTree

                    XMLParser_import_library = XMLParser_import_elementtree
                    if Verbose_import_:
                        print("running with ElementTree")
                except ImportError:
                    raise ImportError(
                        "Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
    """Parse an XML document with whichever ElementTree implementation was
    selected at import time and return the resulting document tree.

    When running on lxml, an ElementTree-compatible parser is substituted
    (unless the caller already supplied one) so that, e.g., comments are
    ignored like plain ElementTree does.

    :param args: positional arguments forwarded to ``eTree.parse``
    :param kwargs: keyword arguments forwarded to ``eTree.parse``
    :return: the parsed document tree
    """
    running_on_lxml = XMLParser_import_library == XMLParser_import_lxml
    if running_on_lxml and 'parser' not in kwargs:
        kwargs['parser'] = eTree.ETCompatXMLParser()
    return eTree.parse(*args, **kwargs)
#
# Globals
#

# Splits a Clark-notation XML tag "{namespace}local" into its optional
# "{namespace}" part (group 1) and the local tag name (group 2).
Tag_pattern_ = re.compile(r'({.*})?(.*)')

#
# Support/utility functions.
#
def indent(level):
    """Return the whitespace prefix for *level* levels of indentation, where
    each level is one 4-space increment.

    :param level: the number of 4 space increments
    :return: string containing the desired number of spaces for indentation
    """
    return '    ' * level
def quote_attrib(in_str):
    """Return *in_str* formatted as an XML attribute value: entity-escape the
    XML special characters and wrap the result in quotes.

    ``&``, ``<`` and ``>`` are always escaped (``&`` first, so the other
    entities are not double-escaped).  The value is wrapped in double quotes
    unless it contains a double quote, in which case single quotes are used;
    if it contains both kinds of quote, embedded double quotes are escaped as
    ``&quot;`` and double quotes wrap the value.

    :param in_str: the value to quote; non-strings are converted with ``%s``
    :return: the quoted, escaped attribute string
    """
    # Fix: the entity replacements below were previously no-ops/corrupted
    # ('&' -> '&' etc.), so special characters leaked unescaped into the XML.
    s1 = in_str if isinstance(in_str, str) else '%s' % in_str
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', '&quot;')
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def suffix_to_int(string):
    """Return the run of digits at the end of *string* as an int.

    Raises AttributeError (from the failed regex match) when the string does
    not end in a digit.
    """
    match = re.search(r'(\d+)$', string)
    return int(match.group(1))
class CABLEPLAN:
    """In-memory model of a cable plan: the switches of a fabric plus the
    links that connect them.

    A plan can be read from a cable-plan XML file or from a live APIC
    (:meth:`get`), compared against another plan (:meth:`difference_switch`,
    :meth:`difference_link`) and serialized back to XML (:meth:`export`).
    """

    def __init__(self, version=None):
        # Version string of the cable-plan schema this plan was read with.
        self.version = version
        self.switches = []
        self.links = []
        self.schemaLocation = 'nxos-cable-plan-schema.xsd'
        # XML namespace bookkeeping, filled in while parsing.
        self.nsmap = None
        self.namespace = 'http://www.cisco.com/cableplan/Schema2'
        self.prefix = 'xsi'
        self.prefix_url = 'http://www.w3.org/2001/XMLSchema-instance'
        self.networkLocation = None
        self.idFormat = 'hostname'

    @classmethod
    def get(cls, source):
        """This will get input a cable plan from 'source'. If source is a string,
        it will get the cable plan from XML in a file whose name is source. If it is
        a Session, it will read the corresponding APIC to get the cable plan.

        :param source: filename of type string or Session of type Session

        :returns: CABLEPLAN
        """
        if isinstance(source, str):
            return cls._parse(source)
        elif isinstance(source, ACI.Session):
            return cls._parse_apic(source)
        else:
            # str() is required: concatenating a type object to a string would
            # itself raise an unrelated TypeError and hide this message.
            raise TypeError('source must of type str or type ACI.Session. Instead was ' + str(type(source)))

    @classmethod
    def _parse(cls, in_file_name):
        """Build a CABLEPLAN from the XML file named *in_file_name*."""
        doc = parsexml_(in_file_name)

        # This can be enhanced to parse a string rather than just a file with the
        # following line:
        # doc = parsexml_(StringIO(inString))

        root_node = doc.getroot()
        cable_plan = cls()
        cable_plan._build_xml(root_node)
        return cable_plan

    @classmethod
    def _parse_apic(cls, session):
        """Build a CABLEPLAN from the running configuration of the APIC
        reachable through *session*."""
        pod = ACI.Pod.get(session)[0]
        pod.populate_children(deep=True)
        cable_plan = cls()
        cable_plan._build_apic(pod)
        return cable_plan

    def get_switch(self, switch_name=None):
        """Return the switch named *switch_name* (or None when absent); when
        called without a name, return a copy of the whole switch list."""
        if switch_name:
            for switch in self.switches:
                if switch.get_name() == switch_name:
                    return switch
            return None
        else:
            return self.switches[:]

    def get_spines(self):
        """Will return list of switches that are spines

        :returns: list of CpSwitch
        """
        switch_list = []
        for switch in self.switches:
            if switch.is_spine():
                switch_list.append(switch)
        return switch_list

    def add_switch(self, new_switch):
        """This will add new_switch to the CABLEPLAN. If the switch already
        exists, it will merge the new_switch with the existing one.
        It will also set the parent of the switch to be the CABLEPLAN. It will
        return the final switch, i.e. new_switch if no merge occurred or the
        newly merged switch if a merge did occur.

        :param new_switch: switch to be added of type CpSwitch

        :returns: CpSwitch
        """
        if not isinstance(new_switch, CpSwitch):
            raise TypeError('add_switch expects object of type CpSwitch')
        new_switch.set_parent(self)
        for switch in self.switches:
            if switch == new_switch:
                switch.merge(new_switch)
                del new_switch
                return switch
        self.switches.append(new_switch)
        return new_switch

    def delete_switch(self, old_switch):
        """Remove *old_switch* from the plan (no-op when absent)."""
        if old_switch in self.switches:
            self.switches.remove(old_switch)

    def exists_switch(self, switch):
        """Return True when *switch* is already part of the plan."""
        return switch in self.switches

    def add_link(self, new_link):
        """Will add a link to the CABLEPLAN. Duplicates will not be allowed, but
        overlapping will be.

        :param new_link: Link to be added of type CpLink

        :returns: None
        """
        if new_link not in self.links:
            self.links.append(new_link)

    def delete_link(self, link):
        """Remove *link* from the plan (no-op when absent)."""
        if link in self.links:
            self.links.remove(link)

    def exists_link(self, link):
        """Return True when *link* is already part of the plan."""
        return link in self.links

    def get_links(self, switch1=None, switch2=None):
        """Returns a list of links. If switch is unspecified, it will return all
        links. If switch is specified, it will return all of the links that are
        connected to switch. If both switch1 and switch2 are specified, it will
        return all links that are connected between the two switches.

        :param switch1: optional first switch of type CpSwitch
        :param switch2: optional second switch of type CpSwitch

        :returns: list of links of type CpLink
        """
        if switch1:
            link_list = []
            for link in self.links:
                if link.is_connected(switch1, switch2):
                    link_list.append(link)
            return link_list
        else:
            return self.links[:]

    def difference_switch(self, cp):
        """Will return a list of switches that are in self, but not in cp.

        :param cp: cable plan

        :returns: list of CpSwitch
        """
        result = []
        myswitches = self.get_switch()
        cpswitches = cp.get_switch()
        for switch in myswitches:
            if switch not in cpswitches:
                result.append(switch)
        return result

    # Link comparison operations and support functions

    def reset_accounting(self):
        """Clears the reference count on each link.

        :rtype: None
        """
        for link in self.links:
            link.reset_accounting()

    def sorted_links(self, switch1, switch2):
        """Returns a sorted list of links between switch1 and switch2. They are
        sorted by specificity from most specific to least specific. The
        specificity is determined by which list of ports is the minimum between
        source and destination and which is the minimum across links.

        :rtype: list
        :param switch1:
        :param switch2:
        """
        result = []
        links = self.get_links(switch1, switch2)
        num_links = len(links)
        # Repeated selection of the not-yet-picked link with the lowest order.
        for i in range(num_links):
            best_order = 100000
            best_link = None
            for link in links:
                if (link.order() < best_order) and (link not in result):
                    best_order = link.order()
                    best_link = link
            if best_order < 100000:
                result.append(best_link)
        return result

    def _switch_link_diff(self, cp, switch1, switch2):
        """Match the capacity of the links between switch1 and switch2 in self
        against the links in cp, consuming availability on both sides.

        :param cp: cable plan of type CABLEPLAN
        :param switch1: first switch of type CpSwitch
        :param switch2: second switch of type CpSwitch
        """
        my_links = self.sorted_links(switch1, switch2)
        other_links = cp.sorted_links(switch1, switch2)
        for my_link in my_links:
            if my_link.remaining_need() > 0:
                # still need to retire some link capacity
                for otherLink in other_links:  # loop through all of the otherLinks to find matches
                    if otherLink.remaining_avail() > 0:
                        # there is still some capacity in otherLink
                        CpLink.match_links(my_link, otherLink)  # match-up links
                        if my_link.remaining_need() == 0:
                            # done with myLink, go get next one
                            break

    def difference_link(self, cp):
        """Returns a list of links that are in self, but not in cp.

        :param cp: cable plan of type CABLEPLAN

        :returns: list of CpLink
        """
        result = []
        self.reset_accounting()
        cp.reset_accounting()
        for switch1 in self.get_switch():
            for switch2 in self.get_switch():
                self._switch_link_diff(cp, switch1, switch2)
        # Any link whose need was not fully matched is part of the difference.
        for myLink in self.get_links():
            if myLink.remaining_need() > 0:
                result.append(myLink)
        return result

    def export(self, out_file=None, level=0):
        """Will generate XML text of the entire CABLEPLAN and return it as a
        string. If out_file is specified, it will write the XML to that file.
        out_file should be opened for writing before calling this method.
        'level' specifies the amount of indentation to start with.
        """
        if out_file:
            # Accept any writable file-like object; the original
            # isinstance(out_file, file) check only worked on Python 2.
            if not hasattr(out_file, 'write'):
                raise TypeError('expected a file')
        tag = 'CISCO_NETWORK_TYPES'
        text = '<?xml version="1.0" encoding="UTF-8"?>\n'
        text += '<?created by cableplan.py?>\n'
        text += indent(level)
        text += '<%s version=%s xmlns=%s xmlns:%s=%s %s:schemaLocation=%s>\n' % (
            tag, quote_attrib(self.version), quote_attrib(self.namespace), self.prefix, quote_attrib(self.prefix_url),
            self.prefix, quote_attrib(self.namespace + ' ' + self.schemaLocation))
        text += self.export_data_center(level=level + 1)
        text += indent(level)
        text += '</%s>\n' % tag
        if out_file:
            out_file.write(text)
        return text

    def export_data_center(self, level=0):
        """Will generate the XML of the CABLEPLAN with DATA_CENTER as the root.
        This will then be returned as a string. 'level' specifies the
        indentation level to start with.

        :param level: optional indentation level, integer

        :returns: string that is the DATA_CENTER xml
        """
        tag = 'DATA_CENTER'
        text = indent(level)
        text += '<%s networkLocation=%s idFormat=%s>\n' % (
            tag, quote_attrib(self.networkLocation), quote_attrib(self.idFormat))
        switches = self.get_spines()
        for switch in switches:
            text += switch.export(level + 1)
        text += indent(level)
        text += '</%s>\n' % tag
        return text

    @staticmethod
    def _get_attr_value(node, attr_name):
        """Read attribute *attr_name* (optionally 'prefix:name') from *node*,
        resolving the prefix through the node's nsmap. Returns None when the
        attribute is absent."""
        attrs = node.attrib
        attr_parts = attr_name.split(':')
        value = None
        if len(attr_parts) == 1:
            value = attrs.get(attr_name)
        elif len(attr_parts) == 2:
            prefix, name = attr_parts
            namespace = node.nsmap.get(prefix)
            if namespace is not None:
                value = attrs.get('{%s}%s' % (namespace, name, ))
        return value

    def _get_namespace_prefix(self, nsmap):
        """Record the document's namespace prefix/URL and default namespace
        from the parsed nsmap."""
        for nsmap_prefix in nsmap:
            if nsmap_prefix is not None:
                self.prefix = nsmap_prefix
                self.prefix_url = nsmap[nsmap_prefix]
            if nsmap_prefix is None:
                self.namespace = nsmap[nsmap_prefix]

    def _get_switch_from_apic_link(self, link, end):
        """Return the CpSwitch matching endpoint *end* (1 or 2) of the APIC
        link, or None when no switch of that name is known."""
        if end == 1:
            switch_name = link.get_node1().get_name()
        elif end == 2:
            switch_name = link.get_node2().get_name()
        else:
            switch_name = ''
        cp_switches = self.get_switch()
        for cp_switch in cp_switches:
            if cp_switch.get_name() == switch_name:
                return cp_switch

    def _build_apic(self, pod):
        """This will build the cable plan using the configuration of the pod.

        :param pod: Pod

        :returns: None
        """
        nodes = pod.get_children(ACI.Node)
        for node in nodes:
            if node.getFabricSt() == 'active':
                if node.get_role() == 'spine':
                    self.add_switch(CpSwitch(node.get_name(), node.get_chassis_type(), spine=True))
                if node.get_role() == 'leaf':
                    self.add_switch(CpSwitch(node.get_name(), node.get_chassis_type()))
        links = pod.get_children(ACI.Link)
        for link in links:
            # BUG FIX: the first endpoint must come from node1. The original
            # read get_node2() twice, so a link whose node1 was missing passed
            # the None-check below and crashed on link.get_node1() later.
            switch1 = link.get_node1()
            switch2 = link.get_node2()
            if switch1 and switch2:
                if link.get_node1().getFabricSt() == 'active' and link.get_node2().getFabricSt() == 'active':
                    if link.get_node1().get_role() != 'controller' and link.get_node2().get_role() != 'controller':
                        source_chassis = self._get_switch_from_apic_link(link, 1)
                        dest_chassis = self._get_switch_from_apic_link(link, 2)
                        source_interface = link.get_port1()
                        dest_interface = link.get_port2()
                        source_port = '{0:s}{1:s}/{2:s}'.format(source_interface.interface_type,
                                                                source_interface.module, source_interface.port)
                        dest_port = '{0:s}{1:s}/{2:s}'.format(dest_interface.interface_type,
                                                              dest_interface.module,
                                                              dest_interface.port)
                        self.add_link(
                            CpLink(source_chassis=source_chassis, source_port=source_port, dest_chassis=dest_chassis,
                                   dest_port=dest_port))

    def _build_xml(self, node):
        """Populate this plan from the CISCO_NETWORK_TYPES root element."""
        # start at CISCO_NETWORK_TYPES
        self.version = self._get_attr_value(node, 'version')
        self.nsmap = node.nsmap  # namespace prefix can be found here
        self._get_namespace_prefix(self.nsmap)  # parse out namespace and prefix
        # TODO: should be refined to handle any namespace prefix
        self.schemaLocation = self._get_attr_value(node, 'xsi:schemaLocation').strip()
        for child in node:
            node_name = Tag_pattern_.match(child.tag).groups()[-1]
            if node_name == 'DATA_CENTER':
                self._parse_xml_data_center(child)

    def _parse_xml_data_center(self, node):
        """Parse a DATA_CENTER element and its CHASSIS_INFO children."""
        self.networkLocation = self._get_attr_value(node, 'networkLocation')
        self.idFormat = self._get_attr_value(node, 'idFormat')
        for child in node:
            node_name = Tag_pattern_.match(child.tag).groups()[-1]
            if node_name == 'CHASSIS_INFO':
                self._parse_xml_chassis_info(child)

    def _parse_xml_chassis_info(self, node):
        """Parse a CHASSIS_INFO element into a (spine) switch plus its links."""
        chassis_name = self._get_attr_value(node, 'sourceChassis')
        chassis_type = self._get_attr_value(node, 'type')
        switch = CpSwitch(chassis_name, chassis_type, spine=True)
        self.add_switch(switch)
        for child in node:
            node_name = Tag_pattern_.match(child.tag).groups()[-1]
            if node_name == 'LINK_INFO':
                self._parse_xml_link_info(child, switch)

    def _parse_xml_link_info(self, node, link_source_chassis):
        """Parse a LINK_INFO element into a CpLink attached to
        *link_source_chassis*."""
        link_dest_chassis = self._get_attr_value(node, 'destChassis')
        link_dest_port = self._get_attr_value(node, 'destPort')
        link_source_port = self._get_attr_value(node, 'sourcePort')
        link_min_ports = self._get_attr_value(node, 'minPorts')
        link_max_ports = self._get_attr_value(node, 'maxPorts')
        switch = CpSwitch(link_dest_chassis, chassis_type=None)
        switch = self.add_switch(switch)
        link = CpLink(source_chassis=link_source_chassis, source_port=link_source_port, dest_chassis=switch,
                      dest_port=link_dest_port, min_ports=link_min_ports, max_ports=link_max_ports)
        self.add_link(link)
class CpSwitch(object):
    """A single switch (chassis) belonging to a cable plan.

    Tracks the chassis name and type, whether the switch is a spine, and the
    CABLEPLAN that owns it.  Equality is by name only, so two CpSwitch
    objects describing the same chassis compare equal even when only one of
    them knows the chassis type.
    """

    def __init__(self, name, chassis_type=None, spine=False, parent=None):
        self.spine = spine
        self.name = name
        self.chassis_type = chassis_type
        self.parent = None
        if parent:
            self.set_parent(parent)

    def get_name(self):
        """Return the name of the chassis.

        :returns: str
        """
        return self.name

    def set_name(self, name):
        """Rename the switch, overriding any preexisting name.  The new name
        becomes part of the link name for every link attached to this switch.

        :param name: name string to set in the switch
        """
        self.name = name
        return None

    def get_type(self):
        """Return the chassis type, e.g. 'n7k' or 'n9k'.

        :returns: str
        """
        return self.chassis_type

    def set_parent(self, parent):
        """Record the owning CABLEPLAN.  Assigning a different plan than the
        one already recorded is an error; re-assigning the same plan is a
        no-op.

        :param parent: parent object of type CABLEPLAN
        """
        if not isinstance(parent, CABLEPLAN):
            raise TypeError('expected parent to be of class CABLEPLAN')
        if self.parent and self.parent != parent:
            raise ValueError('This switch was previously assigned to a different CABLEPLAN')
        self.parent = parent

    def is_spine(self):
        """Return True when the ``spine`` flag is set, otherwise False."""
        return self.spine

    def merge(self, new_switch):
        """Fold new_switch's knowledge into self: the spine flag and chassis
        type are copied over only where new_switch actually has them set, so
        values already known on self are never erased.

        :param new_switch: switch object to merge with self
        """
        if new_switch.spine:
            self.spine = new_switch.spine
        if new_switch.chassis_type:
            self.chassis_type = new_switch.chassis_type

    def __eq__(self, other):
        return self.name == other.name

    def get_links(self):
        """Return the links of the parent CABLEPLAN that touch this switch.

        :returns: list of CpLink
        """
        return self.parent.get_links(self)

    def __str__(self):
        return self.name

    def export(self, level):
        """Serialize this switch and its links as a CHASSIS_INFO XML element,
        indented *level* steps."""
        tag = 'CHASSIS_INFO'
        pieces = [indent(level)]
        pieces.append('<%s sourceChassis=%s type=%s>\n' % (tag, quote_attrib(self.get_name()), quote_attrib(self.get_type())))
        for link in self.get_links():
            pieces.append(link.export(self, level + 1))
        pieces.append(indent(level))
        pieces.append('</%s>\n' % tag)
        return ''.join(pieces)

# end class CpSwitch
class CpPort:
    """This class holds the information for a link's port. Since the port can
    be a single port, a list or a range, putting it in a class allows more
    flexible operations on it.
    """

    def __init__(self, port_set):
        # Sorted list of individual port names expanded from port_set.
        self.ports = self._expand(port_set)
        # Working pool used during link matching; None until
        # reset_accounting() is called.
        self.available_ports = None

    @staticmethod
    def _expand(port_set):
        """Will parse the port_set and return a list of enumerated ports or None.
        port_set is string containing a comma separated list of ports or port ranges.
        A port range consists of a starting port separated from an ending port with a dash.
        Both the starting port and ending port are included in the list.
        The format for a port is a string that ends in a forward slash followed by a number.
        The number is what is incremented
        for a range.  A dash, '-' is not legal in the port name.

        :param port_set: string

        :returns: list of str
        """
        if port_set is None:
            return []

        # use a set so that there are no duplicate ports
        port_list = set()
        port_set = re.sub(r'\s+', '', port_set)  # remove unnecessary white space
        ports_n_ranges = port_set.split(',')
        for portOrRange in ports_n_ranges:
            if '-' in portOrRange:
                # this is a range
                startport, endport = portOrRange.split('-')
                # Both ends must look like ".../<digits>" and share the same
                # prefix before the trailing number.
                prefix = re.findall(r'(.*/)\d+$', startport)
                if len(prefix) != 1:
                    raise ValueError('Badly formed port name in range:"' + startport + '"')
                prefix_e = re.findall(r'(.*/)\d+$', endport)
                if len(prefix_e) != 1:
                    raise ValueError('Badly formed port name in range:"' + endport + '"')
                if prefix[0] != prefix_e[0]:
                    raise ValueError('port range invalid:"' + portOrRange + '"')
                start_num = suffix_to_int(startport)
                end_num = suffix_to_int(endport)
                if start_num > end_num:
                    raise ValueError(
                        'port range invalid - start of range cannot be higher than end:"' + portOrRange + '"')
                # Enumerate the range inclusively.
                for index in range(start_num, end_num + 1):
                    port_name = prefix[0] + str(index)
                    port_list.add(port_name)
            else:
                # this is just a port
                port_list.add(portOrRange)
        return sorted(list(port_list))

    def _rangeify(self):
        """ this will take the list of ports and return a string of comma separated ports and port
        ranges.  A port range will be generated for any sequence longer than two ports.
        """
        if not self.ports:
            return None
        text_list = []
        index = 0
        numports = len(self.ports)
        # Current run of consecutive same-prefix ports: [start_port..cur_port].
        start_port = self.ports[index]
        cur_port = start_port
        cur_num = suffix_to_int(cur_port)
        cur_prefix = re.findall(r'(.*/)\d+$', cur_port)[0]
        start_num = suffix_to_int(start_port)
        while index < (numports - 1):
            next_port = self.ports[index + 1]
            next_num = suffix_to_int(next_port)
            next_prefix = re.findall(r'(.*/)\d+$', next_port)[0]
            if next_num != cur_num + 1 or next_prefix != cur_prefix:
                # there is a break in the sequence terminate the range
                if cur_num == start_num:
                    # run of one: emit the single port
                    text_list.append(start_port)
                elif cur_num - start_num == 1:
                    # run of two: emit both ports rather than a range
                    text_list.append(start_port)
                    text_list.append(cur_port)
                else:
                    # run of three or more: emit "first - last"
                    text_list.append(start_port + ' - ' + cur_port)
                start_port = next_port
                start_num = suffix_to_int(start_port)
            index += 1
            cur_port = self.ports[index]
            cur_num = suffix_to_int(cur_port)
            cur_prefix = re.findall(r'(.*/)\d+$', cur_port)[0]
        # clean-up - index is one past end, cur is last one looked at
        if cur_num == start_num:
            text_list.append(start_port)
        elif cur_num - start_num == 1:
            text_list.append(start_port)
            text_list.append(cur_port)
        else:
            text_list.append(start_port + ' - ' + cur_port)
        if not text_list:
            text = None
        else:
            text = ', '.join(text_list)
        return text

    def reset_accounting(self):
        # Re-arm the pool of available ports with a fresh copy of all ports.
        self.available_ports = self.ports[:]

    def remove_available_port(self, port):
        # Consume one port from the availability pool (no-op when absent).
        # NOTE(review): self.ports appears never to be None here (_expand
        # returns a list), so the guard below looks like dead code — confirm
        # before removing.
        if self.ports is None:
            return
        else:
            if port in self.available_ports:
                self.available_ports.remove(port)

    def list(self):
        # Return a copy of the expanded port list.
        return self.ports[:]

    def __str__(self):
        # Always a string: 'None' when there are no ports.
        text = self._rangeify()
        return str(text)

    def name(self):
        # Like __str__, but returns None (not 'None') when there are no ports.
        text = self._rangeify()
        return text

    def __eq__(self, other):
        """ compares the content of the port list and returns true if they are the same.  The comparison is case insensitive.
        """
        if not self.ports and not other.ports:
            return True
        elif not self.ports and other.ports:
            return False
        elif self.ports and not other.ports:
            return False
        my_ports = set()
        for port in self.ports:
            my_ports.add(port.lower())
        other_ports = set()
        for port in other.ports:
            other_ports.add(port.lower())
        # Symmetric difference is empty exactly when the sets are equal.
        if len(my_ports ^ other_ports) == 0:
            return True
        else:
            return False
class CpLink:
    """A cable-plan link between two CpSwitch chassis.

    The endpoints are normalized at construction time so that the chassis
    with the lexicographically smaller name is always the "source" end;
    this makes link direction irrelevant when links are compared.
    ``minPorts``/``maxPorts`` bound how many physical links this (possibly
    wildcarded) link may represent, and ``refCount`` tracks how many
    physical links have been matched against it so far (see match_links).
    """
    def __init__(self, source_chassis, dest_chassis, source_port=None, dest_port=None, min_ports=None, max_ports=None):
        """Build a link between two chassis.

        :param source_chassis: CpSwitch at one end of the link
        :param dest_chassis: CpSwitch at the other end of the link
        :param source_port: port name(s) on source_chassis, a string or a list
            of strings; None means "any port"
        :param dest_port: port name(s) on dest_chassis, a string or a list
            of strings; None means "any port"
        :param min_ports: minimum number of physical links required
        :param max_ports: maximum number of physical links allowed
        """
        if not isinstance(source_chassis, CpSwitch):
            raise TypeError('expected source_chassis to be of class CpSwitch')
        if not isinstance(dest_chassis, CpSwitch):
            raise TypeError('expected dest_chassis to be of class CpSwitch')
        # allow initialization to be with a list or a string for ports.
        # convert all to a string
        if isinstance(dest_port, list):
            dest_port = ', '.join(dest_port)
        if isinstance(source_port, list):
            source_port = ', '.join(source_port)
        self.minPorts = min_ports
        self.maxPorts = max_ports
        self.refCount = 0
        # initially normalize name: the chassis with the smaller name becomes
        # the "source" end so equivalent links always look the same
        if source_chassis.get_name() < dest_chassis.get_name():
            self.source_chassis = source_chassis
            self.source_port = CpPort(source_port)
            self.dest_chassis = dest_chassis
            self.destPort = CpPort(dest_port)
        else:
            self.source_chassis = dest_chassis
            self.source_port = CpPort(dest_port)
            self.dest_chassis = source_chassis
            self.destPort = CpPort(source_port)
        # count to track references to a particular link
        self.reset_accounting()
        # the maximum reference count is either self.max_ports or is the maximum number of physical links
        # that this link can specify, whichever is smaller. If there is no limit, then maxRef is set to 10000
        if self.destPort.ports and self.source_port.ports:
            max_phys_ports = min(len(self.destPort.ports), len(self.source_port.ports))
        elif self.destPort.ports:
            max_phys_ports = len(self.destPort.ports)
        elif self.source_port.ports:
            max_phys_ports = len(self.source_port.ports)
        else:
            # neither end names explicit ports: effectively unlimited
            max_phys_ports = 10000
        if self.maxPorts:
            self.maxRef = min(max_phys_ports, self.maxPorts)
        else:
            self.maxRef = max_phys_ports
        # self.minRef is the minimum number of physical links needed to meet the requirements of this link
        if self.minPorts:
            self.minRef = int(self.minPorts)
        else:
            self.minRef = 1
    def reset_accounting(self):
        """Resets account on the source and dest ports as well as reference count
        """
        self.destPort.reset_accounting()
        self.source_port.reset_accounting()
        self.refCount = 0
    def remaining_need(self):
        """ returns the remaining number of physical links needed to match against self to satisfy requirements.
        The parameters used to calculate this value are reset by the reset_accounting() method which is typically
        invoked when invoking a difference_link() method on the CABLEPLAN parent object.
        :returns: int
        """
        return max(0, self.minRef - self.refCount)
    def remaining_avail(self):
        """ returns the remaining number of physical links available to match against
        The parameters used to calculate this value are reset by the reset_accounting() method which is typically
        invoked when invoking a difference_link() method on the CABLEPLAN parent object.
        :returns: int
        """
        return max(0, self.maxRef - self.refCount)
    def order(self):
        """Calculates the order of the link defined by the maximum number of physical links this link
        can represent
        :returns: int
        """
        if self.source_port.ports and self.destPort.ports:
            result = min(len(self.source_port.ports), len(self.destPort.ports))
        elif self.source_port.ports:
            result = len(self.source_port.ports)
        elif self.destPort.ports:
            result = len(self.destPort.ports)
        else:
            result = 10000  # this is the any-any case which is unlimited.
        return result
    def get_name(self):
        """Return the link name '(chassis[-ports],chassis[-ports])'; the port
        part is omitted for an end with no explicit ports.
        """
        if self.source_port.name():
            stext = '%s-%s' % (self.source_chassis, self.source_port.name())
        else:
            stext = '%s' % self.source_chassis
        if self.destPort.name():
            dtext = '%s-%s' % (self.dest_chassis, self.destPort.name())
        else:
            dtext = '%s' % self.dest_chassis
        return '(%s,%s)' % (stext, dtext)
    def is_connected(self, switch1, switch2=None):
        """Returns True if switch1 is one of the switch endpoints of the link and switch2 is unspecified
        otherwise is will return True if both switch1 and switch2 are switch endpoints of the link.  If
        switch1 is the same as switch2, it will return False.
        :param switch1: first switch to check if it an end-point of the link
        :param switch2: optional second switch to check if it an end-point of the link
        :returns: True if switch1 (and optional switch2) is an end-point of the link
        """
        s1 = (switch1 == self.source_chassis) or (switch1 == self.dest_chassis)
        if switch2:
            s2 = (self.source_chassis == switch2) or (self.dest_chassis == switch2)
        else:
            s2 = True
        result = s1 and s2 and (switch1 != switch2)
        return result
    def __eq__(self, other):
        """Links are equal when both chassis and both port sets match.

        Endpoint order was normalized in __init__, so direction does not
        affect equality.
        """
        if ((self.source_chassis == other.source_chassis) and (self.source_port == other.source_port) and
                (self.dest_chassis == other.dest_chassis) and (self.destPort == other.destPort)):
            return True
        return False
    def has_port_in_common(self, link):
        """Returns True if link has any ports that match self.  It will compare
        all ports included expanded lists of port sets.
        :param link: link to check to see if matches, or overlaps, with self
        :returns: Boolean
        """
        # each pairing of matching chassis (in either orientation) is checked
        # for overlapping port names
        if link.source_chassis == self.source_chassis:
            lnk_ports = set(link.source_port.list())
            slf_ports = set(self.source_port.list())
            if len(lnk_ports & slf_ports) > 0:
                return True
        if link.dest_chassis == self.dest_chassis:
            lnk_ports = set(link.destPort.list())
            slf_ports = set(self.destPort.list())
            if len(lnk_ports & slf_ports) > 0:
                return True
        if link.source_chassis == self.dest_chassis:
            lnk_ports = set(link.source_port.list())
            slf_ports = set(self.destPort.list())
            if len(lnk_ports & slf_ports) > 0:
                return True
        if link.dest_chassis == self.source_chassis:
            lnk_ports = set(link.destPort.list())
            slf_ports = set(self.source_port.list())
            if len(lnk_ports & slf_ports) > 0:
                return True
        return False
    def __str__(self):
        """Same as get_name(): '(chassis[-ports],chassis[-ports])'."""
        return self.get_name()
    @staticmethod
    def _get_attrib_str(attrib, value):
        """Return an XML attribute fragment 'attrib=value ' (trailing space),
        or '' when value is None.
        """
        text = ''
        if value is not None:
            text = '%s=%s ' % (attrib, quote_attrib(value))
        return text
    def export(self, chassis, level):
        """Will return string of XML describing the LINK_INFO.  It will use 'chassis' to determine
        which is the source chassis so that it will be omitted from the XML and the other chassis will
        become the destination.  'level' is the indentation level.
        :param chassis: Chassis that is the parent of the LINK_INFO xml
        :param level: Indentation level
        :returns: str
        """
        tag = 'LINK_INFO'
        # orient the XML so the passed-in chassis is the (implicit) source
        if chassis == self.source_chassis:
            dport_text = self._get_attrib_str('destPort', self.destPort.name())
            sport_text = self._get_attrib_str('sourcePort', self.source_port.name())
            dchassis_text = self._get_attrib_str('destChassis', self.dest_chassis)
        else:
            dport_text = self._get_attrib_str('destPort', self.source_port.name())
            sport_text = self._get_attrib_str('sourcePort', self.destPort.name())
            dchassis_text = self._get_attrib_str('destChassis', self.source_chassis)
        min_port_text = self._get_attrib_str('minPorts', self.minPorts)
        max_port_text = self._get_attrib_str('maxPorts', self.maxPorts)
        text = '<%s %s%s%s%s%s' % (tag, sport_text, dchassis_text, dport_text, min_port_text, max_port_text)
        text = indent(level) + text.strip()
        text += '/>\n'
        return text
    @staticmethod
    def match_links(link1, link2):
        """This will match-up link1 and link2 and increment the reference count in each link for each
        of the matches that happen.  It will do this until the minimum number of links has been reached for
        link1.  It will return the number of matches that occurred.
        :param link1: first link of type CpLink that is part of the matching
        :param link2: second link of type CpLink that is part of the matching
        :returns: number of matches that occurred.
        """
        result = 0
        # match-up ends of link
        if (link1.source_chassis == link2.source_chassis) and (link1.dest_chassis == link2.dest_chassis):
            start1 = link1.source_port
            start2 = link2.source_port
            end1 = link1.destPort
            end2 = link2.destPort
        else:
            # chassis don't match so no link match
            return 0
        # get ends in common - the maximum match will be the min of this overlap.
        # the string sentinel 'any' marks an end with no explicit port list.
        if start1.ports != [] and start2.ports != []:
            starts = list(set(start1.available_ports) & set(start2.available_ports))
        elif start1.ports:
            starts = start1.available_ports[:]
        elif start2.ports:
            starts = start2.available_ports[:]
        else:
            starts = 'any'
        if end1.ports != [] and end2.ports != []:
            ends = list(set(end1.available_ports) & set(end2.available_ports))
        elif end1.ports:
            ends = end1.available_ports[:]
        elif end2.ports:
            ends = end2.available_ports[:]
        else:
            ends = 'any'
        if starts == 'any':
            len_starts = 10000
        else:
            len_starts = len(starts)
        if ends == 'any':
            len_ends = 10000
        else:
            len_ends = len(ends)
        # retire as many physical links as link1 still needs, bounded by what
        # link2 can still provide and by the overlapping port availability
        num_to_retire = min(link1.remaining_need(), link2.remaining_avail(), len_starts, len_ends)
        for index in range(num_to_retire):
            if starts != 'any':
                start1.remove_available_port(starts[index])
                start2.remove_available_port(starts[index])
            if ends != 'any':
                end1.remove_available_port(ends[index])
                end2.remove_available_port(ends[index])
            link1.refCount += 1
            link2.refCount += 1
            result += 1
        return result
# end class LINK
def compare_cable_plans(session, file1, file2=None):
    """Compare two cable plans and print their differences.

    If *file2* is given, the two cable-plan XML files are compared directly.
    Otherwise this logs in to the APIC through *session*, reads the running
    fabric as the first plan, and compares it against *file1*.

    :param session: ACI session used to reach the APIC (only used when
        file2 is None)
    :param file1: cable-plan XML file name
    :param file2: optional second cable-plan XML file name
    """
    if file2:
        cp1 = CABLEPLAN.get(file1)
        source1 = file1
        cp2 = CABLEPLAN.get(file2)
        source2 = file2
    else:
        resp = session.login()
        if not resp.ok:
            print('%% Could not login to APIC')
            sys.exit(1)
        cp1 = CABLEPLAN.get(session)
        source1 = 'APIC'
        cp2 = CABLEPLAN.get(file1)
        source2 = file1
    missing_switches = cp1.difference_switch(cp2)
    extra_switches = cp2.difference_switch(cp1)
    if missing_switches:
        print('\nThe following switches are in %s, but not in %s' % (source1, source2))
        for switch in missing_switches:
            print('    ', switch.get_name())
    if extra_switches:
        print('\nThe following switches are in %s, but not in %s' % (source2, source1))
        # BUG FIX: this loop previously iterated missing_switches, so the
        # extra switches were never the ones actually listed.
        for switch in extra_switches:
            print('    %s' % switch.get_name())
    if missing_switches or extra_switches:
        # link comparison is meaningless when the switch sets differ
        print('Link comparisons skipped because the switches are miss-matched')
    else:
        missing_links = cp1.difference_link(cp2)
        extra_links = cp2.difference_link(cp1)
        if missing_links:
            print('\nThe following links in', source1, 'are not found in', source2)
            for link in missing_links:
                print('    %s' % link.get_name())
        if extra_links:
            print('\nThe following links in %s are not found in %s' % (source2, source1))
            for link in extra_links:
                print('    ', link.get_name())
        if not missing_links and not extra_links:
            print('%s and %s are the same' % (source1, source2))
def export_to_file(session, file1=None):
    """Export the running fabric's cable plan to a file or to stdout.

    :param session: ACI session used to reach the APIC
    :param file1: optional output file name; when None the cable-plan XML
        is written to standard output
    """
    resp = session.login()
    if not resp.ok:
        print('%% Could not login to APIC')
        sys.exit(1)
    cp = CABLEPLAN.get(session)
    if file1:
        # context manager guarantees the file is closed even on error
        with open(file1, 'w') as f:
            cp.export(f)
    else:
        # write directly so no trailing newline is added; the original used a
        # Python 2 "print cp.export()," statement, which is a syntax error
        # under Python 3
        sys.stdout.write(cp.export())
def main():
    """Command-line entry point: export a fabric's cable plan or compare cable plans.

    Mutually exclusive modes:
      * -e [FILE]      export the running fabric's cable plan (to FILE or stdout)
      * -c1 [-c2]      compare cable plans (against the fabric, or each other)

    Exits with status 1 on invalid option combinations.
    """
    # fixed: the previous description was copy-pasted from an interface-stats
    # sample and did not describe this tool
    description = 'Simple application that logs on to the APIC and exports or compares cable plans.'
    creds = ACI.Credentials('apic', description)
    # group = creds.add_mutually_exclusive_group()
    group1 = creds.add_argument_group('Export', 'Export a cable plan')
    group1.add_argument('-e', '--export_file', default=None, const='export text', dest='export_file', nargs='?',
                        help='Export cableplan from running fabric.  If EXPORT_FILE is specified, the '
                             'cableplan will be written to EXPORT_FILE')
    group2 = creds.add_argument_group('Compare', 'Compare cable plans')
    group2.add_argument('-c1', '--cableplan1',
                        type=str, nargs=1,
                        default=None,
                        help="Name of cableplan xml file.  If only CABLEPLAN1 is specified, "
                             "it will be compared to the running fabric.  If it is specified with "
                             "CABLEPLAN2 (the -c2 option), then it will compare CABLEPLAN1 with CABLEPLAN2")
    group2.add_argument('-c2', '--cableplan2',
                        type=str, nargs=1,
                        default=None,
                        help="Name of second cableplan xml file.  The second cableplan file.  This file will "
                             "be compared to CABLEPLAN1.  This option must only be used "
                             "in conjunction with the -c1 option.")
    args = creds.get()
    session = ACI.Session(args.url, args.login, args.password)
    # validate the option combinations; sys.exit(1) (instead of the bare
    # exit()) reports a nonzero status on usage errors
    if args.export_file and (args.cableplan1 or args.cableplan2):
        creds.print_help()
        print('\nError: export and compare operations are mutually exclusive')
        sys.exit(1)
    if args.cableplan2 and not args.cableplan1:
        creds.print_help()
        print('\nError: -c2 option only valid with -c1 option')
        sys.exit(1)
    if not args.export_file and not args.cableplan1:
        creds.print_help()
        print('\nError: Either export (-e) or compare (-c1) is required')
        sys.exit(1)
    if args.export_file:
        if args.export_file == 'export text':
            export_to_file(session)
        else:
            export_to_file(session, args.export_file)
    if args.cableplan1:
        if args.cableplan2:
            compare_cable_plans(session, args.cableplan1[0], args.cableplan2[0])
        else:
            compare_cable_plans(session, args.cableplan1[0])
# Run the command-line tool only when executed directly, not on import.
if __name__ == '__main__':
    main()
# Public objects re-exported when this module is imported with "*".
__all__ = [
    "CpPort",
    "CpLink",
    "CpSwitch",
    "CABLEPLAN",
]
| 20,789 |
672 | <reponame>DogeCoding/iOSCompiledRuntime<gh_stars>100-1000
/*
* Copyright (c) 2000-2007 AppleInc. All rights reserved.
*/
/*
* @OSF_COPYRIGHT@
*/
/*
* HISTORY
*
* Revision 1.2 1998/09/30 21:20:45 wsanchez
* Merged in IntelMerge1 (mburg: Intel support)
*
* Revision 1.1.2.1 1998/09/30 18:18:50 mburg
* Changes for Intel port
*
* Revision 1.1.1.1 1998/03/07 02:25:38 wsanchez
* Import of OSF Mach kernel (~mburg)
*
* Revision 1.1.8.2 1996/07/31 09:46:36 paire
* Merged with nmk20b7_shared (1.1.11.2 -> 1.1.11.1)
* [96/06/10 paire]
*
* Revision 1.1.11.2 1996/06/13 12:38:25 bernadat
* Do not use inline macros when MACH_ASSERT is configured.
* [96/05/24 bernadat]
*
* Revision 1.1.11.1 1996/05/14 13:50:23 paire
* Added new linl and loutl __inline__.
* Added conditional compilation for [l]{in|oub}[bwl]() __inline__.
* [95/11/24 paire]
*
* Revision 1.1.8.1 1994/09/23 02:00:28 ezf
* change marker to not FREE
* [1994/09/22 21:25:52 ezf]
*
* Revision 1.1.4.5 1993/08/09 19:40:41 dswartz
* Add ANSI prototypes - CR#9523
* [1993/08/06 17:45:57 dswartz]
*
* Revision 1.1.4.4 1993/06/11 15:17:37 jeffc
* CR9176 - ANSI C violations: inb/outb macros must be changed from
* ({ ... }) to inline functions, with proper type definitions. Callers
* must pass proper types to these functions: 386 I/O port addresses
* are unsigned shorts (not pointers).
* [1993/06/10 14:26:10 jeffc]
*
* Revision 1.1.4.3 1993/06/07 22:09:28 jeffc
* CR9176 - ANSI C violations: trailing tokens on CPP
* directives, extra semicolons after decl_ ..., asm keywords
* [1993/06/07 19:00:26 jeffc]
*
* Revision 1.1.4.2 1993/06/04 15:28:45 jeffc
* CR9176 - ANSI problems -
* Added casts to get macros to take caddr_t as an I/O space address.
* [1993/06/04 13:45:55 jeffc]
*
* Revision 1.1 1992/09/30 02:25:51 robert
* Initial revision
*
* $EndLog$
*/
/* CMU_HIST */
/*
* Revision 2.5 91/05/14 16:14:20 mrt
* Correcting copyright
*
* Revision 2.4 91/02/05 17:13:56 mrt
* Changed to new Mach copyright
* [91/02/01 17:37:08 mrt]
*
* Revision 2.3 90/12/20 16:36:37 jeffreyh
* changes for __STDC__
* [90/12/07 jeffreyh]
*
* Revision 2.2 90/11/26 14:48:41 rvb
* Pulled from 2.5
* [90/11/22 10:09:38 rvb]
*
* [90/08/14 mg32]
*
* Now we know how types are factor in.
* Cleaned up a bunch: eliminated ({ for output and flushed unused
* output variables.
* [90/08/14 rvb]
*
* This is how its done in gcc:
* Created.
* [90/03/26 rvb]
*
*/
/* CMU_ENDHIST */
/*
* Mach Operating System
* Copyright (c) 1991,1990 Carnegie Mellon University
* All Rights Reserved.
*
* Permission to use, copy, modify and distribute this software and its
* documentation is hereby granted, provided that both the copyright
* notice and this permission notice appear in all copies of the
* software, derivative works or modified versions, and any portions
* thereof, and that both notices appear in supporting documentation.
*
* CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
* CONDITION. CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
* ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
*
* Carnegie Mellon requests users of this software to return to
*
* Software Distribution Coordinator or <EMAIL>
* School of Computer Science
* Carnegie Mellon University
* Pittsburgh PA 15213-3890
*
* any improvements or extensions that they make and grant Carnegie Mellon
* the rights to redistribute these changes.
*/
/*
*/
#ifndef ARM_PIO_H
#define ARM_PIO_H
/*
 * Programmed I/O (PIO) helpers.
 *
 * NOTE(review): despite the ARM_PIO_H guard, the port type and the inline
 * bodies below use x86 IN/OUT instructions and a 16-bit i386 port address --
 * presumably inherited from the i386 version of this header; confirm this is
 * intentional for this architecture.
 */
typedef unsigned short i386_ioport_t;
/* read a longword */
extern unsigned long inl(
	i386_ioport_t port);
/* read a shortword */
extern unsigned short inw(
	i386_ioport_t port);
/* read a byte */
extern unsigned char inb(
	i386_ioport_t port);
/* write a longword */
extern void outl(
	i386_ioport_t port,
	unsigned long datum);
/* write a word */
extern void outw(
	i386_ioport_t port,
	unsigned short datum);
/* write a byte */
extern void outb(
	i386_ioport_t port,
	unsigned char datum);
/* input an array of longwords */
extern void linl(
	i386_ioport_t port,
	int * data,
	int count);
/* output an array of longwords */
extern void loutl(
	i386_ioport_t port,
	int * data,
	int count);
/* input an array of words */
/* NOTE(review): the word variants also take int * buffers, matching the
 * longword variants -- confirm callers expect that rather than short *. */
extern void linw(
	i386_ioport_t port,
	int * data,
	int count);
/* output an array of words */
extern void loutw(
	i386_ioport_t port,
	int * data,
	int count);
/* input an array of bytes */
extern void linb(
	i386_ioport_t port,
	char * data,
	int count);
/* output an array of bytes */
extern void loutb(
	i386_ioport_t port,
	char * data,
	int count);
/*
 * inl - read a 32-bit longword from I/O port "port".
 * Constraints: "=a" places the result in EAX, "d" passes the port in DX.
 */
extern __inline__ unsigned long inl(
	i386_ioport_t port)
{
	unsigned long datum;
	__asm__ volatile("inl %1, %0" : "=a" (datum) : "d" (port));
	return(datum);
}
/*
 * inw - read a 16-bit word from I/O port "port".
 * The 0x66 operand-size prefix byte turns the 32-bit "inl" into a
 * 16-bit "inw"; result in AX ("=a"), port in DX ("d").
 */
extern __inline__ unsigned short inw(
	i386_ioport_t port)
{
	unsigned short datum;
	__asm__ volatile(".byte 0x66; inl %1, %0" : "=a" (datum) : "d" (port));
	return(datum);
}
/*
 * inb - read an 8-bit byte from I/O port "port".
 * Result in AL ("=a"), port in DX ("d").
 */
extern __inline__ unsigned char inb(
	i386_ioport_t port)
{
	unsigned char datum;
	__asm__ volatile("inb %1, %0" : "=a" (datum) : "d" (port));
	return(datum);
}
/*
 * outl - write the 32-bit longword "datum" to I/O port "port".
 * Datum in EAX ("a"), port in DX ("d").
 */
extern __inline__ void outl(
	i386_ioport_t port,
	unsigned long datum)
{
	__asm__ volatile("outl %0, %1" : : "a" (datum), "d" (port));
}
/*
 * outw - write the 16-bit word "datum" to I/O port "port".
 * The 0x66 operand-size prefix byte turns "outl" into a 16-bit "outw".
 */
extern __inline__ void outw(
	i386_ioport_t port,
	unsigned short datum)
{
	__asm__ volatile(".byte 0x66; outl %0, %1" : : "a" (datum), "d" (port));
}
/*
 * outb - write the 8-bit byte "datum" to I/O port "port".
 * Datum in AL ("a"), port in DX ("d").
 */
extern __inline__ void outb(
	i386_ioport_t port,
	unsigned char datum)
{
	__asm__ volatile("outb %0, %1" : : "a" (datum), "d" (port));
}
#endif /* ARM_PIO_H */
| 2,526 |
407 | <filename>saas/cluster/api/clustermanage/clustermanage-server/src/main/java/com/alibaba/sreworks/clustermanage/server/controllers/ClusterController.java<gh_stars>100-1000
package com.alibaba.sreworks.clustermanage.server.controllers;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.sreworks.clustermanage.server.params.ClusterCreateParam;
import com.alibaba.sreworks.clustermanage.server.params.ClusterDeployClientParam;
import com.alibaba.sreworks.clustermanage.server.params.ClusterModifyParam;
import com.alibaba.sreworks.clustermanage.server.services.ClientPackageService;
import com.alibaba.sreworks.clustermanage.server.services.DeployClientService;
import com.alibaba.sreworks.common.util.JsonUtil;
import com.alibaba.sreworks.common.util.RegularUtil;
import com.alibaba.sreworks.common.util.StringUtil;
import com.alibaba.sreworks.domain.DO.Cluster;
import com.alibaba.sreworks.domain.repository.ClusterRepository;
import com.alibaba.sreworks.flyadmin.server.services.FlyadminAppmanagerClusterService;
import com.alibaba.sreworks.flyadmin.server.services.PluginClusterService;
import com.alibaba.tesla.common.base.TeslaBaseResult;
import com.alibaba.tesla.web.controller.BaseController;
import com.google.gson.JsonArray;
import io.kubernetes.client.openapi.ApiException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST controller for Kubernetes cluster management ("/cluster"): creation
 * (with optional in-cluster management-client deployment), deletion,
 * modification, single/list/public queries, UI selector options, and client
 * (helm release) status and redeploy endpoints.
 *
 * @author jinghua.yjh
 */
@Slf4j
@RestController
@RequestMapping("/cluster")
@Api(tags = "集群")
public class ClusterController extends BaseController {
    @Autowired
    ClusterRepository clusterRepository;
    @Autowired
    PluginClusterService pluginClusterService;
    @Autowired
    FlyadminAppmanagerClusterService flyadminAppmanagerClusterService;
    @Autowired
    DeployClientService deployClientService;
    @Autowired
    private ClientPackageService clientPackageService;
    /**
     * Creates a cluster record. When no kubeconfig is supplied it is fetched
     * through the cluster plugin service; when deployClient equals "enable"
     * the management client is deployed into the cluster first. The cluster
     * is then persisted and registered with the AppManager service.
     *
     * @param param creation parameters (account, cluster name, kubeconfig, ...)
     * @return JSON with the new clusterId and teamId
     */
    @ApiOperation(value = "创建")
    @RequestMapping(value = "create", method = RequestMethod.POST)
    public TeslaBaseResult create(@RequestBody ClusterCreateParam param)
        throws IOException, ApiException, InterruptedException {
        Cluster cluster = param.toCluster(getUserEmployeeId());
        String kubeConfig;
        if(StringUtils.isEmpty(param.getKubeconfig())){
            // no kubeconfig provided: resolve it via the plugin service
            kubeConfig = pluginClusterService.getKubeConfig(
                cluster.getAccountId(), param.getClusterName(), getUserEmployeeId());
        }else{
            kubeConfig = param.getKubeconfig();
        }
        if(!StringUtils.isEmpty(param.getDeployClient()) && param.getDeployClient().equals("enable")){
            deployClientService.run(kubeConfig);
        }
        cluster.setKubeconfig(kubeConfig);
        clusterRepository.saveAndFlush(cluster);
        flyadminAppmanagerClusterService.create(cluster, getUserEmployeeId());
        JSONObject result = new JSONObject();
        result.put("clusterId", cluster.getId());
        result.put("teamId", cluster.getTeamId());
        return buildSucceedResult(result);
    }
    /**
     * Deletes a cluster by id from the local repository and from AppManager.
     *
     * @param id cluster id
     * @return JSON echoing the deleted clusterId, its teamId and "OK"
     */
    @ApiOperation(value = "删除")
    @RequestMapping(value = "delete", method = RequestMethod.DELETE)
    public TeslaBaseResult delete(Long id) throws IOException, ApiException {
        // read first so teamId can still be reported after deletion
        Cluster cluster = clusterRepository.findFirstById(id);
        clusterRepository.deleteById(id);
        flyadminAppmanagerClusterService.delete(id, getUserEmployeeId());
        JSONObject result = new JSONObject();
        result.put("clusterId", id);
        result.put("result", "OK");
        result.put("teamId", cluster.getTeamId());
        return buildSucceedResult(result);
    }
    /**
     * Applies the modifiable fields of {@code param} to an existing cluster.
     *
     * @param id cluster id
     * @param param patch parameters
     * @return JSON with clusterId, teamId and "OK"
     */
    @ApiOperation(value = "修改")
    @RequestMapping(value = "modify", method = RequestMethod.PUT)
    public TeslaBaseResult modify(Long id, @RequestBody ClusterModifyParam param) {
        Cluster cluster = clusterRepository.findFirstById(id);
        param.patchCluster(cluster, getUserEmployeeId());
        clusterRepository.saveAndFlush(cluster);
        JSONObject result = new JSONObject();
        result.put("clusterId", cluster.getId());
        result.put("result", "OK");
        result.put("teamId", cluster.getTeamId());
        return buildSucceedResult(result);
    }
    /**
     * Returns the detail JSON of one cluster, with dates and key casing
     * normalized for the frontend.
     */
    @ApiOperation(value = "详情")
    @RequestMapping(value = "get", method = RequestMethod.GET)
    public TeslaBaseResult get(Long id) {
        JSONObject ret = clusterRepository.findObjectById(id);
        RegularUtil.gmt2Date(ret);
        RegularUtil.underscoreToCamel(ret);
        return buildSucceedResult(ret);
    }
    /**
     * Lists clusters visible to the current user, fuzzy-matched by name
     * (empty name matches all).
     */
    @ApiOperation(value = "列表")
    @RequestMapping(value = "list", method = RequestMethod.GET)
    public TeslaBaseResult list(String name) {
        name = StringUtil.isEmpty(name) ? "" : name;
        List<JSONObject> ret = clusterRepository.findObjectByUserAndNameLike(getUserEmployeeId(), "%" + name + "%");
        RegularUtil.underscoreToCamel(ret);
        RegularUtil.gmt2Date(ret);
        return buildSucceedResult(ret);
    }
    /**
     * Lists publicly visible clusters, fuzzy-matched by name
     * (empty name matches all).
     */
    @ApiOperation(value = "公共列表")
    @RequestMapping(value = "listPublic", method = RequestMethod.GET)
    public TeslaBaseResult listPublic(String name) {
        name = StringUtil.isEmpty(name) ? "" : name;
        List<JSONObject> ret = clusterRepository.findObjectByVisibleScopeIsPublicAndNameLike("%" + name + "%");
        RegularUtil.underscoreToCamel(ret);
        RegularUtil.gmt2Date(ret);
        return buildSucceedResult(ret);
    }
    /**
     * Returns {label, value} select options (cluster name / id) for all
     * clusters of a team, for frontend dropdowns.
     */
    @ApiOperation(value = "idSelector")
    @RequestMapping(value = "idSelector", method = RequestMethod.GET)
    public TeslaBaseResult idSelector(Long teamId) {
        List<Cluster> clusterList = clusterRepository.findAllByTeamId(teamId);
        return buildSucceedResult(JsonUtil.map(
            "options", clusterList.stream().map(cluster -> JsonUtil.map(
                "label", cluster.getName(),
                "value", cluster.getId()
            )).collect(Collectors.toList())
        ));
    }
    /**
     * Reports the install status of each configured client package by
     * cross-referencing the cluster's helm releases: every package starts as
     * installStatus=false and is marked true when a matching chart is found
     * deployed in the cluster.
     *
     * @param id cluster id
     * @return collection of per-package status objects
     */
    @ApiOperation(value = "纳管状态查询")
    @RequestMapping(value = "checkClientStatus", method = RequestMethod.GET)
    public TeslaBaseResult checkClientStatus(Long id) throws IOException, InterruptedException {
        JSONObject ret = clusterRepository.findObjectById(id);
        JSONObject resultDict = new JSONObject();
        // seed the result with every configured client package, keyed by chart
        // name and marked as not installed
        for(String clientName: clientPackageService.getNames()){
            JSONObject obj = new JSONObject();
            JSONObject clientInfo = clientPackageService.getValue(clientName);
            obj.put("installStatus", false);
            if(StringUtils.isNotBlank(clientInfo.getString("chart"))){
                obj.put("chart", clientInfo.getString("chart"));
            }
            if(StringUtils.isNotBlank(clientInfo.getString("name"))){
                obj.put("name", clientInfo.getString("name"));
            }
            if(StringUtils.isNotBlank(clientInfo.getString("chart"))) {
                resultDict.put(clientInfo.getString("chart"), obj);
            }
        }
        JSONArray helmList = deployClientService.listHelmInstallStatus(ret.getString("kubeconfig"));
        for (int i = 0; i < helmList.size(); i++) {
            JSONObject helmObject = helmList.getJSONObject(i);
            String chartNameVersion = helmObject.getString("chart");
            if(StringUtils.isNotBlank(chartNameVersion)){
                if(!chartNameVersion.contains("-")){
                    continue;
                }
                // helm reports "<chart-name>-<version>"; the last '-' separates
                // the version from the (possibly dash-containing) chart name
                String[] items = chartNameVersion.split("-");
                String version = items[items.length -1];
                String name = Arrays.stream(Arrays.copyOfRange(items, 0, items.length - 1))
                    .collect(Collectors.joining("-"));
                if(resultDict.getJSONObject(name) != null){
                    JSONObject targetObject = resultDict.getJSONObject(name);
                    targetObject.put("currentVersion", version);
                    if(StringUtils.isNotBlank(helmObject.getString("namespace"))){
                        targetObject.put("currentNamespace", helmObject.getString("namespace"));
                    }
                    if(StringUtils.isNotBlank(helmObject.getString("status"))){
                        targetObject.put("currentHelmStatus", helmObject.getString("status"));
                        if(helmObject.getString("status").equals("deployed")){
                            targetObject.put("installStatus", true);
                        }
                    }
                }
            }
        }
        return buildSucceedResult(resultDict.values());
    }
    /**
     * Re-installs all client packages into the cluster using its stored
     * kubeconfig and the environment overrides from the request.
     *
     * @param id cluster id
     * @param param deployment parameters (environment variable map)
     * @return collection of per-package install results
     */
    @ApiOperation(value = "纳管客户端重新部署")
    @RequestMapping(value = "redeployClient", method = RequestMethod.PUT)
    public TeslaBaseResult redeployClient(Long id,
        @RequestBody ClusterDeployClientParam param) throws IOException, InterruptedException, TimeoutException {
        JSONObject ret = clusterRepository.findObjectById(id);
        JSONObject result = deployClientService.installAllPackages(ret.getString("kubeconfig"), param.getEnvMap());
        return buildSucceedResult(result.values());
    }
}
| 3,894 |
329 | // Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.api.v8;
import com.cloudera.api.Parameters;
import com.cloudera.api.v1.MgmtRolesResource;
import java.io.InputStream;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
/**
 * Version 8 API resource for management-service roles, extending the v1
 * resource with log-retrieval endpoints: the role's main log, stdout,
 * stderr, and (for roles that support periodic stacks collection) the
 * stacks log and a zipped stacks-log bundle.
 */
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
public interface MgmtRolesResourceV8 extends MgmtRolesResource {
  /**
   * Retrieves the log file for the role's main process.
   * <p>
   * If the role is not started, this will be the log file associated with
   * the last time the role was run.
   * <p>
   * Log files are returned as plain text (type "text/plain").
   *
   * @param roleName The role to fetch logs from.
   * @return Contents of the role's log file.
   */
  @GET
  @Path("/{roleName}/logs/full")
  @Produces(MediaType.TEXT_PLAIN)
  public InputStream getFullLog(
      @PathParam(Parameters.ROLE_NAME) String roleName);
  /**
   * Retrieves the role's standard output.
   * <p>
   * If the role is not started, this will be the output associated with
   * the last time the role was run.
   * <p>
   * Log files are returned as plain text (type "text/plain").
   *
   * @param roleName The role to fetch stdout from.
   * @return Contents of the role's standard output.
   */
  @GET
  @Path("/{roleName}/logs/stdout")
  @Produces(MediaType.TEXT_PLAIN)
  public InputStream getStandardOutput(
      @PathParam(Parameters.ROLE_NAME) String roleName);
  /**
   * Retrieves the role's standard error output.
   * <p>
   * If the role is not started, this will be the output associated with
   * the last time the role was run.
   * <p>
   * Log files are returned as plain text (type "text/plain").
   *
   * @param roleName The role to fetch stderr from.
   * @return Contents of the role's standard error output.
   */
  @GET
  @Path("/{roleName}/logs/stderr")
  @Produces(MediaType.TEXT_PLAIN)
  public InputStream getStandardError(
      @PathParam(Parameters.ROLE_NAME) String roleName);
  /**
   * Retrieves the stacks log file, if any, for the role's main process. Note
   * that not all roles support periodic stacks collection.
   *
   * The log files are returned as plain text (type "text/plain").
   *
   * @param roleName The role to fetch stacks logs from.
   * @return Contents of the role's log file.
   */
  @GET
  @Path("/{roleName}/logs/stacks")
  @Produces(MediaType.TEXT_PLAIN)
  public InputStream getStacksLog(
      @PathParam(Parameters.ROLE_NAME) String roleName);
  /**
   * Download a zip-compressed archive of role stacks logs. Note that not all
   * roles support periodic stacks collection.
   *
   * @param roleName The role to fetch the stacks logs bundle from.
   * @return The archive data.
   */
  @GET
  @Path("/{roleName}/logs/stacksBundle")
  @Produces(MediaType.APPLICATION_OCTET_STREAM)
  public InputStream getStacksLogsBundle(
      @PathParam(Parameters.ROLE_NAME) String roleName);
}
2,740 | <filename>coremltools/converters/mil/mil/ops/tests/test_conv.py<gh_stars>1000+
# Copyright (c) 2020, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from coremltools.converters.mil import testing_reqs
from coremltools.converters.mil.mil import get_new_symbol
from coremltools.converters.mil.testing_reqs import *
from .testing_utils import run_compare_builder
backends = testing_reqs.backends
class TestConvTranspose:
    """Stress tests for the MIL ``conv_transpose`` op.

    Each parametrized case builds the equivalent PyTorch ``ConvTransposeNd``
    layer, runs it to compute the expected output, then compares the result of
    the MIL builder/backend against that reference.
    """

    @pytest.mark.skipif(not testing_reqs._HAS_TORCH, reason="PyTorch not installed.")
    @pytest.mark.parametrize(
        ",".join(
            [
                "use_cpu_only",
                "backend",
                "conv_dim",
                "padding",
                "DHWKdKhKw",
                "stride",
                "dilation",
                "has_bias",
                "groups",
                "test_symbolic",
                "test_output_shape",
            ]
        ),
        itertools.product(
            [True, False],
            backends,
            ["conv1d", "conv2d", "conv3d"],
            [(1, 2, 3), (2, 2, 2)],
            [(7, 7, 7, 2, 2, 2), (10, 12, 14, 3, 2, 4)],
            [(1, 1, 1), (2, 1, 2)],
            [(1, 1, 1), (1, 2, 1)],
            [True, False],
            [1, 2],
            [True, False],
            [True, False],
        ),
    )
    def test_builder_to_backend_stress(
        self,
        use_cpu_only,
        backend,
        conv_dim,
        padding,
        DHWKdKhKw,
        stride,
        dilation,
        has_bias,
        groups,
        test_symbolic,
        test_output_shape,
    ):
        # ``DHWKdKhKw`` packs the spatial input sizes (D, H, W) and kernel
        # sizes (Kd, Kh, Kw) for up to three dimensions.
        if test_symbolic and test_output_shape:
            # conv_transpose output_shape can only be constant (non-symbolic)
            return
        if backend[0] == "mlprogram" and groups == 2:
            pytest.xfail("rdar://81999134 (ConvTranspose with group > 1 crashing on both CPU and GPU backend)")
        D, H, W, Kd, Kh, Kw = DHWKdKhKw
        N, C_in, C_out = 1, 1 * groups, 2 * groups
        # torch is imported lazily; the whole test is skipped when unavailable.
        import torch
        import torch.nn as nn
        isDeconv1d = conv_dim == "conv1d"
        isDeconv2d = conv_dim == "conv2d"
        # Build the matching torch ConvTransposeNd reference layer and the
        # padding list in MIL's per-edge ("custom" pad) layout.
        if isDeconv1d:
            strides = [stride[0]]
            dilations = [dilation[0]]
            kernels = [Kh]
            m = nn.ConvTranspose1d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                dilation=dilations,
                bias=has_bias,
                groups=groups,
                padding=padding[0],
            )
            input_shape = [N, C_in, H]
            paddings = [padding[0], padding[0]]
        elif isDeconv2d:
            strides = [stride[0], stride[1]]
            dilations = [dilation[0], dilation[1]]
            kernels = [Kh, Kw]
            m = nn.ConvTranspose2d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                dilation=dilations,
                bias=has_bias,
                groups=groups,
                padding=(padding[0], padding[1]),
            )
            input_shape = [N, C_in, H, W]
            paddings = [padding[0], padding[0], padding[1], padding[1]]
        else:
            strides = [stride[0], stride[1], stride[2]]
            dilations = [dilation[0], dilation[1], dilation[2]]
            kernels = [Kd, Kh, Kw]
            m = nn.ConvTranspose3d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                dilation=dilations,
                bias=has_bias,
                groups=groups,
                padding=padding,
            )
            input_shape = [N, C_in, D, H, W]
            paddings = [
                padding[0],
                padding[0],
                padding[1],
                padding[1],
                padding[2],
                padding[2],
            ]
        # Reuse the torch layer's randomly initialized parameters for the MIL
        # op, and run the layer once to obtain the expected output.
        wts = m.state_dict()
        weight = wts["weight"].detach().numpy()
        bias = wts["bias"].detach().numpy() if has_bias else None
        input = torch.randn(*input_shape)
        output = m(input)
        output = output.detach().numpy()
        input = input.detach().numpy()
        output_shape = list(output.shape)
        if test_symbolic:
            # For symbolic input test
            # Make Batch Size and input channel as symbolic
            symbolic_batch_size = get_new_symbol()
            input_shape[0] = symbolic_batch_size
            output_shape[0] = symbolic_batch_size
        expected_output_types = tuple(output_shape[:]) + (types.fp32,)
        expected_outputs = [output]
        input_placeholders = {"x": mb.placeholder(shape=input_shape)}
        input_values = {"x": input}
        def build(x):
            # Assemble mb.conv_transpose kwargs; bias / output_shape are only
            # passed when exercised by this parametrization.
            arguments = {
                "x": x,
                "weight": weight,
                "pad": paddings,
                "pad_type": "custom",
                "strides": strides,
                "dilations": dilations,
                "groups": groups,
            }
            if has_bias:
                arguments["bias"] = bias
            if test_output_shape:
                arguments["output_shape"] = output.shape
            return mb.conv_transpose(**arguments)
        run_compare_builder(
            build,
            input_placeholders,
            input_values,
            expected_output_types,
            expected_outputs,
            use_cpu_only=use_cpu_only,
            frontend_only=False,
            backend=backend,
        )
class TestConv:
    """Stress and fusion tests for the MIL ``conv`` op.

    The stress tests build the equivalent PyTorch ``ConvNd`` layer as a
    reference; ``test_conv_int_bias_fusion`` checks the conv+bias graph pass.
    """

    @pytest.mark.skipif(not testing_reqs._HAS_TORCH, reason="PyTorch not installed.")
    @pytest.mark.parametrize(
        ",".join(
            [
                "use_cpu_only",
                "backend",
                "conv_dim",
                "padding",
                "DHWKdKhKw",
                "stride",
                "dilation",
                "has_bias",
                "groups",
                "symbolic",
            ]
        ),
        itertools.product(
            [True, False],
            backends,
            ["conv1d", "conv2d", "conv3d"],
            [(1, 1, 1), (2, 2, 2)],
            [(5, 5, 5, 2, 2, 2), (10, 12, 14, 3, 2, 4)],
            [(2, 2, 2), (2, 1, 1)],
            [(1, 1, 1), (2, 1, 1)],
            [True, False],
            [1, 2],
            [True, False],
        ),
    )
    def test_builder_to_backend_stress(
        self,
        use_cpu_only,
        backend,
        conv_dim,
        padding,
        DHWKdKhKw,
        stride,
        dilation,
        has_bias,
        groups,
        symbolic,
    ):
        # ``DHWKdKhKw`` packs spatial input sizes (D, H, W) and kernel sizes
        # (Kd, Kh, Kw) for up to three dimensions.
        D, H, W, Kd, Kh, Kw = DHWKdKhKw
        N, C_in, C_out = 1, 1 * groups, 2 * groups
        # torch is imported lazily; the whole test is skipped when unavailable.
        import torch
        import torch.nn as nn
        isConv1d = conv_dim == "conv1d"
        isConv2d = conv_dim == "conv2d"
        # Build the matching torch ConvNd reference layer and the padding list
        # in MIL's per-edge ("custom" pad) layout.
        if isConv1d:
            strides = [stride[0]]
            dilations = [dilation[0]]
            kernels = [Kh]
            m = nn.Conv1d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                dilation=dilations,
                bias=has_bias,
                groups=groups,
                padding=padding[0],
            )
            input_shape = [N, C_in, H]
            paddings = [padding[0], padding[0]]
        elif isConv2d:
            strides = [stride[0], stride[1]]
            dilations = [dilation[0], dilation[1]]
            kernels = [Kh, Kw]
            m = nn.Conv2d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                dilation=dilations,
                bias=has_bias,
                groups=groups,
                padding=(padding[0], padding[1]),
            )
            input_shape = [N, C_in, H, W]
            paddings = [padding[0], padding[0], padding[1], padding[1]]
        else:
            strides = [stride[0], stride[1], stride[2]]
            dilations = [dilation[0], dilation[1], dilation[2]]
            kernels = [Kd, Kh, Kw]
            m = nn.Conv3d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                dilation=dilations,
                bias=has_bias,
                groups=groups,
                padding=padding,
            )
            input_shape = [N, C_in, D, H, W]
            paddings = [
                padding[0],
                padding[0],
                padding[1],
                padding[1],
                padding[2],
                padding[2],
            ]
        # Reuse the torch layer's randomly initialized parameters for the MIL
        # op, and run the layer once to obtain the expected output.
        wts = m.state_dict()
        weight = wts["weight"].detach().numpy()
        bias = wts["bias"].detach().numpy() if has_bias else None
        # PyTorch and CoreML weight format is same
        # PyTorch weight format: C_out, C_in, H, W
        # MIL weight format: C_out, C_in, H, W
        input = torch.randn(*input_shape)
        output = m(input)
        output = output.detach().numpy()
        input = input.detach().numpy()
        output_shape = list(output.shape)
        if symbolic:
            # For symbolic input test
            # Make Batch Size and input channel as symbolic
            symbolic_batch_size = get_new_symbol()
            input_shape[0] = symbolic_batch_size
            output_shape[0] = symbolic_batch_size
        expected_output_types = tuple(output_shape[:]) + (types.fp32,)
        expected_outputs = [output]
        input_placeholders = {"x": mb.placeholder(shape=input_shape)}
        input_values = {"x": input}
        def build(x):
            # Assemble mb.conv kwargs; bias is only passed when present.
            arguments = {
                "x": x,
                "weight": weight,
                "pad": paddings,
                "pad_type": "custom",
                "strides": strides,
                "dilations": dilations,
                "groups": groups,
            }
            if has_bias:
                arguments["bias"] = bias
            return mb.conv(**arguments)
        run_compare_builder(
            build,
            input_placeholders,
            input_values,
            expected_output_types,
            expected_outputs,
            use_cpu_only=use_cpu_only,
            frontend_only=False,
            backend=backend,
        )

    @pytest.mark.skip("<rdar://problem/53460668> Dynamic weights + bias not supported on GPU")
    @pytest.mark.skipif(not testing_reqs._HAS_TORCH, reason="PyTorch not installed.")
    @pytest.mark.parametrize(
        ",".join(
            [
                "use_cpu_only",
                "backend",
                "conv_dim",
                "padding",
                "DHWKdKhKw",
                "stride",
                "groups",
                "symbolic",
                "has_bias"
            ]
        ),
        itertools.product(
            [True, False],
            backends,
            ["conv1d", "conv2d"],
            [(1, 1, 1), (2, 2, 2)],
            [(5, 5, 5, 2, 2, 2), (10, 12, 14, 3, 2, 4)],
            [(1, 2, 1)],
            [1, 2],
            [True, False],
            [True],
        ),
    )
    def test_builder_to_backend_stress_weights_input(
        self,
        use_cpu_only,
        backend,
        conv_dim,
        padding,
        DHWKdKhKw,
        stride,
        groups,
        symbolic,
        has_bias
    ):
        # Same as the stress test above, but feeds the conv weights as a
        # runtime input (placeholder) instead of a compile-time constant.
        # Only conv1d/conv2d are parametrized; no dilation in this variant.
        D, H, W, Kd, Kh, Kw = DHWKdKhKw
        N, C_in, C_out = 1, 1 * groups, 2 * groups
        # torch is imported lazily; the whole test is skipped when unavailable.
        import torch
        import torch.nn as nn
        isConv1d = conv_dim == "conv1d"
        isConv2d = conv_dim == "conv2d"
        if isConv1d:
            strides = [stride[0]]
            kernels = [Kh]
            m = nn.Conv1d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                bias=has_bias,
                groups=groups,
                padding=padding[0],
            )
            input_shape = [N, C_in, H]
            paddings = [padding[0], padding[0]]
        elif isConv2d:
            strides = [stride[0], stride[1]]
            kernels = [Kh, Kw]
            m = nn.Conv2d(
                C_in,
                C_out,
                kernels,
                stride=strides,
                groups=groups,
                padding=(padding[0], padding[1]),
                bias=has_bias,
            )
            input_shape = [N, C_in, H, W]
            paddings = [padding[0], padding[0], padding[1], padding[1]]
        wts = m.state_dict()
        weight = wts["weight"].detach().numpy()
        bias = wts["bias"].detach().numpy() if has_bias else None
        # PyTorch and CoreML weight format is same
        # PyTorch weight format: C_out, C_in, H, W
        # MIL weight format: C_out, C_in, H, W
        input = torch.randn(*input_shape)
        output = m(input)
        output = output.detach().numpy()
        input = input.detach().numpy()
        output_shape = list(output.shape)
        if symbolic:
            # For symbolic input test
            # Make Batch Size and input channel as symbolic
            symbolic_batch_size = get_new_symbol()
            input_shape[0] = symbolic_batch_size
            output_shape[0] = symbolic_batch_size
        expected_output_types = tuple(output_shape[:]) + (types.fp32,)
        expected_outputs = [output]
        # The weight tensor is a second graph input here.
        input_placeholders = {"x": mb.placeholder(shape=input_shape), "input_weight":mb.placeholder(shape=weight.shape)}
        input_values = {"x": input, "input_weight": weight}
        def build(x, input_weight):
            arguments = {
                "x": x,
                "weight": input_weight,
                "pad": paddings,
                "pad_type": "custom",
                "strides": strides,
                "groups": groups,
            }
            if has_bias:
                arguments["bias"] = bias
            return mb.conv(**arguments)
        run_compare_builder(
            build,
            input_placeholders,
            input_values,
            expected_output_types,
            expected_outputs,
            use_cpu_only=use_cpu_only,
            frontend_only=False,
            backend=backend,
        )

    @pytest.mark.parametrize(
        "use_cpu_only, backend", itertools.product([True], backends, )
    )
    def test_conv_int_bias_fusion(self, use_cpu_only, backend):
        """
        Test conv bias fusion when const input is of type int.
        Expected behavior is that the bias const will be cast to the same dtype as the
        weight during the fuse_conv_bias pass, otherwise mb.conv() will raise an error.
        Input graph:
                                      Const(int type)
                                        |
                                        V
        input -----> convolution -----> add/sub  ---> out
        Output graph:
        input -----> convolution -----> out
        """
        # 1x1 conv with scalar weight 2.5; adding int bias 10 afterwards should
        # be folded into the conv by the fuse_conv_bias pass.
        weight = np.array([2.5], dtype=np.float32).reshape([1, 1, 1, 1])
        def build(x):
            x = mb.conv(x=x, weight=weight)
            bias = mb.const(val=[10])
            return mb.add(x=x, y=bias)
        input = np.array([1, 2, 3, 4], dtype=np.float32).reshape((1, 1, 2, 2))
        # Expected: input * 2.5 + 10, elementwise.
        output = np.array([12.5, 15.0, 17.5, 20.0], dtype=np.float32).reshape((1, 1, 2, 2))
        expected_output_types = output.shape + (types.fp32,)
        expected_outputs = [output]
        input_placeholders = {"x": mb.placeholder(shape=input.shape)}
        input_values = {"x": input}
        run_compare_builder(
            build,
            input_placeholders,
            input_values,
            expected_output_types,
            expected_outputs,
            use_cpu_only=use_cpu_only,
            frontend_only=False,
            backend=backend,
        )
| 9,153 |
574 | <reponame>xuanwcn/swift<filename>swift-generator/src/main/java/com/facebook/swift/generator/template/EnumContext.java
/*
* Copyright (C) 2012 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.swift.generator.template;
import com.facebook.swift.generator.SwiftDocumentContext;

import com.google.common.collect.Lists;

import java.util.List;
import java.util.Objects;
/**
 * Template context describing a Thrift enum rendered as a Java enum:
 * the target Java package/name plus the accumulated enum fields.
 */
public class EnumContext extends BaseJavaContext
{
    private final String javaPackage;
    private final String javaName;
    private final List<EnumFieldContext> fields = Lists.newArrayList();

    EnumContext(SwiftDocumentContext swiftDocumentContext, String javaPackage, String javaName)
    {
        super(swiftDocumentContext);
        this.javaPackage = javaPackage;
        this.javaName = javaName;
    }

    /**
     * Adds a single enum constant to this context.
     *
     * @param parameter the enum field to append
     */
    public void addField(final EnumFieldContext parameter)
    {
        this.fields.add(parameter);
    }

    public List<EnumFieldContext> getFields()
    {
        return fields;
    }

    @Override
    public String getJavaPackage()
    {
        return javaPackage;
    }

    @Override
    public String getJavaName()
    {
        return javaName;
    }

    @Override
    public int hashCode()
    {
        // Objects.hash uses the same 31-based, null-is-zero algorithm as the
        // previous hand-rolled implementation (same field order), so the
        // produced hash values are unchanged.
        return Objects.hash(fields, javaName, javaPackage);
    }

    @Override
    public boolean equals(final Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final EnumContext other = (EnumContext) obj;
        // Objects.equals collapses the former null-check cascades; semantics
        // are identical (null-safe equality per field).
        return Objects.equals(fields, other.fields)
                && Objects.equals(javaName, other.javaName)
                && Objects.equals(javaPackage, other.javaPackage);
    }

    @Override
    public String toString()
    {
        return "EnumContext [javaPackage=" + javaPackage + ", javaName=" + javaName + ", fields=" + fields + "]";
    }
}
| 1,317 |
4,224 | <reponame>lgarciaos/Firmware
/****************************************************************************
*
* Copyright (c) 2021 PX4 Development Team. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name PX4 nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
#pragma once
#include "UavcanPublisherBase.hpp"
#include <uavcan/equipment/range_sensor/Measurement.hpp>
#include <uORB/SubscriptionCallback.hpp>
#include <uORB/topics/distance_sensor.h>
namespace uavcannode
{
// Bridges the uORB distance_sensor topic to the UAVCAN
// uavcan.equipment.range_sensor.Measurement broadcast message.
class RangeSensorMeasurement :
	public UavcanPublisherBase,
	public uORB::SubscriptionCallbackWorkItem,
	private uavcan::Publisher<uavcan::equipment::range_sensor::Measurement>
{
public:
	RangeSensorMeasurement(px4::WorkItem *work_item, uavcan::INode &node, uint8_t instance = 0) :
		UavcanPublisherBase(uavcan::equipment::range_sensor::Measurement::DefaultDataTypeID),
		uORB::SubscriptionCallbackWorkItem(work_item, ORB_ID(distance_sensor), instance),
		uavcan::Publisher<uavcan::equipment::range_sensor::Measurement>(node)
	{
		this->setPriority(uavcan::TransferPriority::Default);
	}

	// Prints the uORB topic -> UAVCAN message mapping, but only once the
	// distance_sensor topic is actually advertised.
	void PrintInfo() override
	{
		if (uORB::SubscriptionCallbackWorkItem::advertised()) {
			printf("\t%s -> %s:%d\n",
			       uORB::SubscriptionCallbackWorkItem::get_topic()->o_name,
			       uavcan::equipment::range_sensor::Measurement::getDataTypeFullName(),
			       uavcan::equipment::range_sensor::Measurement::DefaultDataTypeID);
		}
	}

	// Translates any pending distance_sensor update into a UAVCAN range
	// measurement and broadcasts it.
	void BroadcastAnyUpdates() override
	{
		// distance_sensor[] -> uavcan::equipment::range_sensor::Measurement
		distance_sensor_s dist;
		if (uORB::SubscriptionCallbackWorkItem::update(&dist)) {
			uavcan::equipment::range_sensor::Measurement range_sensor{};
			// The uORB multi-instance number doubles as the UAVCAN sensor id.
			range_sensor.sensor_id = get_instance();
			range_sensor.range = dist.current_distance;
			range_sensor.field_of_view = dist.h_fov;
			// sensor type
			switch (dist.type) {
			case distance_sensor_s::MAV_DISTANCE_SENSOR_LASER:
				range_sensor.sensor_type = uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_LIDAR;
				break;

			case distance_sensor_s::MAV_DISTANCE_SENSOR_ULTRASOUND:
				range_sensor.sensor_type = uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_SONAR;
				break;

			case distance_sensor_s::MAV_DISTANCE_SENSOR_RADAR:
				range_sensor.sensor_type = uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_RADAR;
				break;

			case distance_sensor_s::MAV_DISTANCE_SENSOR_INFRARED:
			default:
				range_sensor.sensor_type = uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_UNDEFINED;
				break;
			}

			// reading_type
			// Out-of-range readings are reported explicitly; otherwise a
			// non-zero signal_quality marks the range as valid.
			// NOTE(review): assumes signal_quality == 0 means "quality
			// unknown/invalid" -- verify against distance_sensor.msg.
			if (dist.current_distance >= dist.max_distance) {
				range_sensor.reading_type = uavcan::equipment::range_sensor::Measurement::READING_TYPE_TOO_FAR;

			} else if (dist.current_distance <= dist.min_distance) {
				range_sensor.reading_type = uavcan::equipment::range_sensor::Measurement::READING_TYPE_TOO_CLOSE;

			} else if (dist.signal_quality != 0) {
				range_sensor.reading_type = uavcan::equipment::range_sensor::Measurement::READING_TYPE_VALID_RANGE;

			} else {
				range_sensor.reading_type = uavcan::equipment::range_sensor::Measurement::READING_TYPE_UNDEFINED;
			}

			uavcan::Publisher<uavcan::equipment::range_sensor::Measurement>::broadcast(range_sensor);

			// ensure callback is registered
			uORB::SubscriptionCallbackWorkItem::registerCallback();
		}
	}
};
} // namespace uavcannode
| 1,694 |
348 | <filename>docs/data/leg-t2/033/03302063.json
{"nom":"Bordeaux","circ":"2ème circonscription","dpt":"Gironde","inscrits":65995,"abs":40852,"votants":25143,"blancs":2576,"nuls":818,"exp":21749,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":13874},{"nuance":"LR","nom":"Mme <NAME>","voix":7875}]} | 122 |
518 | {
"name": " <NAME>",
"category": "HR & Legal",
"start_url": "https://newton.newtonsoftware.com/login",
"icons": [
{
"src": "https://cdn.filestackcontent.com/B0MGgcfTuOSfnTkpRygm"
},
{
"src": "https://cdn.filestackcontent.com/du4EZHRQ3S7p2yJY27zx",
"platform": "browserx"
}
],
"theme_color": "#f64266",
"scope": "https://newton.newtonsoftware.com",
"bx_legacy_service_id": "newton-software"
}
| 209 |
764 | {
"symbol": "MAX",
"account_name": "eosmax1token",
"overview": {
"en": "EOSMax is a gaming entertainment platform based on the intelligent contract of Eos main network. The platform's gambling games all adopt the original random number generation technology, and the team has made the algorithm open source, supporting the result verification. Meanwhile, the platform will airdrop the platform token MAX free of charge to the players through the mode of \"game is mining\". In the future, 100% of the profits of the platform will be distributed to the holders of the MAX token, making the player a shareholder of the platform."
},
"website": "https://eosmax.io"
} | 181 |
3,156 | from test.integration.base import DBTIntegrationTest, use_profile
import yaml
class TestBaseCaching(DBTIntegrationTest):
    """dbt integration test for adapter methods (caching schema variant):
    compiles and runs the project, then compares ``model`` to ``expected``."""

    @property
    def schema(self):
        # Schema suffix under which the test relations are created.
        return "adapter_methods_caching"

    @property
    def models(self):
        # Directory containing this test project's models.
        return "models"

    @property
    def project_config(self):
        # Minimal v2 project config; data tests live in ./tests.
        return {
            'config-version': 2,
            'test-paths': ['tests']
        }

    @use_profile('postgres')
    def test_postgres_adapter_methods(self):
        self.run_dbt(['compile'])  # trigger any compile-time issues
        self.run_dbt()
        self.assertTablesEqual('model', 'expected')
| 267 |
3,027 | <reponame>joedomino874/hummingbot
import asyncio
import json
import unittest
from typing import Awaitable, List
from unittest.mock import AsyncMock, patch
import aiohttp
from hummingbot.core.web_assistant.connections.ws_connection import WSConnection
from hummingbot.core.web_assistant.connections.data_types import WSRequest, WSResponse
from test.hummingbot.connector.network_mocking_assistant import (
NetworkMockingAssistant
)
class WSConnectionTest(unittest.TestCase):
    """Unit tests for ``WSConnection``: connect/disconnect lifecycle,
    send/receive, timeout handling, and ping/pong bookkeeping, driven
    against a mocked aiohttp websocket."""

    @classmethod
    def setUpClass(cls) -> None:
        super().setUpClass()
        cls.ev_loop = asyncio.get_event_loop()
        cls.ws_url = "ws://some/url"

    def setUp(self) -> None:
        super().setUp()
        self.mocking_assistant = NetworkMockingAssistant()
        self.client_session = aiohttp.ClientSession()
        self.ws_connection = WSConnection(self.client_session)
        self.async_tasks: List[asyncio.Task] = []

    def tearDown(self) -> None:
        # Bug fix: WSConnection.disconnect() and aiohttp.ClientSession.close()
        # are coroutines. The previous code called them without awaiting, so
        # neither the connection nor the session was ever actually closed and
        # asyncio emitted "coroutine ... was never awaited" warnings. Run both
        # to completion on the test event loop instead.
        # NOTE(review): assumes disconnect() is a safe no-op when not (or no
        # longer) connected -- verify against WSConnection.
        self.ev_loop.run_until_complete(self.ws_connection.disconnect())
        self.ev_loop.run_until_complete(self.client_session.close())
        for task in self.async_tasks:
            task.cancel()
        super().tearDown()

    def async_run_with_timeout(self, coroutine: Awaitable, timeout: int = 1):
        """Run ``coroutine`` on the class event loop, failing after ``timeout`` seconds."""
        ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout))
        return ret

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_connect_and_disconnect(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()

        self.assertFalse(self.ws_connection.connected)

        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))

        self.assertTrue(self.ws_connection.connected)

        self.async_run_with_timeout(self.ws_connection.disconnect())

        self.assertFalse(self.ws_connection.connected)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_attempt_to_connect_second_time_raises(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))

        with self.assertRaises(RuntimeError) as e:
            self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))

        self.assertEqual("WS is connected.", str(e.exception))

    def test_send_when_disconnected_raises(self):
        request = WSRequest(payload={"one": 1})

        with self.assertRaises(RuntimeError) as e:
            self.async_run_with_timeout(self.ws_connection.send(request))

        self.assertEqual("WS is not connected.", str(e.exception))

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_send(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        request = WSRequest(payload={"one": 1})

        self.async_run_with_timeout(self.ws_connection.send(request))

        json_msgs = self.mocking_assistant.json_messages_sent_through_websocket(
            ws_connect_mock.return_value
        )

        self.assertEqual(1, len(json_msgs))
        self.assertEqual(request.payload, json_msgs[0])

    def test_receive_when_disconnected_raises(self):
        with self.assertRaises(RuntimeError) as e:
            self.async_run_with_timeout(self.ws_connection.receive())

        self.assertEqual("WS is not connected.", str(e.exception))

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_raises_on_timeout(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()

        def raise_timeout(*_, **__):
            raise asyncio.TimeoutError

        ws_connect_mock.return_value.receive.side_effect = raise_timeout
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))

        with self.assertRaises(asyncio.TimeoutError) as e:
            self.async_run_with_timeout(self.ws_connection.receive())

        self.assertEqual("Message receive timed out.", str(e.exception))

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        data = {"one": 1}
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message=json.dumps(data)
        )

        self.assertEqual(0, self.ws_connection.last_recv_time)

        response = self.async_run_with_timeout(self.ws_connection.receive())

        self.assertIsInstance(response, WSResponse)
        self.assertEqual(data, response.data)
        self.assertNotEqual(0, self.ws_connection.last_recv_time)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_disconnects_and_raises_on_aiohttp_closed(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.CLOSED
        )

        with self.assertRaises(ConnectionError) as e:
            self.async_run_with_timeout(self.ws_connection.receive())

        self.assertEqual("The WS connection was closed unexpectedly.", str(e.exception))
        self.assertFalse(self.ws_connection.connected)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_disconnects_and_raises_on_aiohttp_close(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.CLOSE
        )

        with self.assertRaises(ConnectionError) as e:
            self.async_run_with_timeout(self.ws_connection.receive())

        self.assertEqual("The WS connection was closed unexpectedly.", str(e.exception))
        self.assertFalse(self.ws_connection.connected)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_ignores_aiohttp_close_msg_if_disconnect_called(self, ws_connect_mock):
        """A CLOSED frame must not raise when the client itself initiated the disconnect."""
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.CLOSED
        )
        prev_side_effect = ws_connect_mock.return_value.receive.side_effect

        async def disconnect_on_side_effect(*args, **kwargs):
            await self.ws_connection.disconnect()
            return await prev_side_effect(*args, **kwargs)

        ws_connect_mock.return_value.receive.side_effect = disconnect_on_side_effect

        response = self.async_run_with_timeout(self.ws_connection.receive())

        self.assertFalse(self.ws_connection.connected)
        self.assertIsNone(response)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_ignores_ping(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.PING
        )
        data = {"one": 1}
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message=json.dumps(data)
        )

        response = self.async_run_with_timeout(self.ws_connection.receive())

        self.assertEqual(data, response.data)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_sends_pong_on_ping(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.PING
        )
        receive_task = self.ev_loop.create_task(self.ws_connection.receive())
        self.async_tasks.append(receive_task)

        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value)

        ws_connect_mock.return_value.pong.assert_called()

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_ping_updates_last_recv_time(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.PING
        )
        receive_task = self.ev_loop.create_task(self.ws_connection.receive())
        self.async_tasks.append(receive_task)

        self.assertEqual(0, self.ws_connection.last_recv_time)

        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value)

        self.assertNotEqual(0, self.ws_connection.last_recv_time)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_ignores_pong(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.PONG
        )
        data = {"one": 1}
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message=json.dumps(data)
        )

        response = self.async_run_with_timeout(self.ws_connection.receive())

        self.assertEqual(data, response.data)

    @patch("aiohttp.client.ClientSession.ws_connect", new_callable=AsyncMock)
    def test_receive_pong_updates_last_recv_time(self, ws_connect_mock):
        ws_connect_mock.return_value = self.mocking_assistant.create_websocket_mock()
        self.async_run_with_timeout(self.ws_connection.connect(self.ws_url))
        self.mocking_assistant.add_websocket_aiohttp_message(
            ws_connect_mock.return_value, message="", message_type=aiohttp.WSMsgType.PONG
        )
        receive_task = self.ev_loop.create_task(self.ws_connection.receive())
        self.async_tasks.append(receive_task)

        self.assertEqual(0, self.ws_connection.last_recv_time)

        self.mocking_assistant.run_until_all_aiohttp_messages_delivered(ws_connect_mock.return_value)

        self.assertNotEqual(0, self.ws_connection.last_recv_time)
| 4,839 |
407 | package com.alibaba.tesla.appmanager.server.action;
import com.alibaba.tesla.appmanager.common.exception.AppErrorCode;
import com.alibaba.tesla.appmanager.common.exception.AppException;
import com.alibaba.tesla.appmanager.server.event.loader.ComponentPackageTaskStateActionLoadedEvent;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Service;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* ComponentPackage State Action 管理器
*
* @author <EMAIL>
*/
@Service
@Slf4j
public class ComponentPackageTaskStateActionManager
implements ApplicationListener<ComponentPackageTaskStateActionLoadedEvent> {
private static final String LOG_PRE = "[" + ComponentPackageTaskStateActionManager.class.getSimpleName()
+ "] action=actionManager.componentPackageTask|message=";
@Autowired
private ApplicationContext context;
private ConcurrentMap<String, ComponentPackageTaskStateAction> instanceMap = new ConcurrentHashMap<>();
/**
* 注册 State 处理 Action
*
* @param key State 名称
* @param instance 实例
*/
private void register(String key, ComponentPackageTaskStateAction instance) {
instanceMap.put(key, instance);
}
/**
* 自动根据事件注册 Instance 实例
*
* @param event 事件
*/
@Override
public void onApplicationEvent(ComponentPackageTaskStateActionLoadedEvent event) {
String key = event.getKey();
Object bean;
try {
bean = context.getBean(event.getBeanName());
} catch (Exception e) {
String message = String.format("cannot get bean now, failed to load action instance|beanName=%s|key=%s",
event.getBeanName(), event.getKey());
log.error(message);
throw new AppException(AppErrorCode.UNKNOWN_ERROR, message);
}
register(key, (ComponentPackageTaskStateAction) bean);
log.info("component package task state action instance {} has registered", key);
}
/**
* 获取指定 State 的实例
*
* @param key State 名称
* @return ComponentPackageTaskStateAction 实例
*/
public ComponentPackageTaskStateAction getInstance(String key) {
ComponentPackageTaskStateAction instance = instanceMap.get(key);
if (instance == null) {
throw new AppException(AppErrorCode.INVALID_USER_ARGS, "invalid state name " + key);
}
return instance;
}
}
| 1,017 |
1,338 | <filename>headers/os/app/Handler.h
/*
* Copyright 2001-2014 Haiku, Inc. All rights reserved.
* Distributed under the terms of the MIT License.
*
* Authors:
* <NAME>, <EMAIL>
*/
#ifndef _HANDLER_H
#define _HANDLER_H
#include <Archivable.h>
class BLooper;
class BMessageFilter;
class BMessage;
class BMessenger;
class BList;
#define B_OBSERVE_WHAT_CHANGE "be:observe_change_what"
#define B_OBSERVE_ORIGINAL_WHAT "be:observe_orig_what"
const uint32 B_OBSERVER_OBSERVE_ALL = 0xffffffff;
namespace BPrivate {
class ObserverList;
}
// BHandler is the target of BMessages dispatched by a BLooper. Handlers are
// chained via SetNextHandler(), can filter incoming messages, participate in
// the observer/notifier protocol (StartWatching / SendNotices) and in
// scripting (ResolveSpecifier / GetSupportedSuites).
class BHandler : public BArchivable {
public:
								BHandler(const char* name = NULL);
	virtual						~BHandler();

	// Archiving
								BHandler(BMessage* data);
	static	BArchivable*		Instantiate(BMessage* data);
	virtual	status_t			Archive(BMessage* data, bool deep = true) const;

	// BHandler guts.
	// Hook invoked for each message dispatched to this handler.
	virtual	void				MessageReceived(BMessage* message);
			BLooper*			Looper() const;
			void				SetName(const char* name);
			const char*			Name() const;
	virtual	void				SetNextHandler(BHandler* handler);
			BHandler*			NextHandler() const;

	// Message filtering
	virtual	void				AddFilter(BMessageFilter* filter);
	virtual	bool				RemoveFilter(BMessageFilter* filter);
	virtual	void				SetFilterList(BList* filters);
			BList*				FilterList();

	// Convenience wrappers for locking the looper this handler belongs to.
			bool				LockLooper();
			status_t			LockLooperWithTimeout(bigtime_t timeout);
			void				UnlockLooper();

	// Scripting
	virtual BHandler*			ResolveSpecifier(BMessage* message, int32 index,
									BMessage* specifier, int32 what,
									const char* property);
	virtual status_t			GetSupportedSuites(BMessage* data);

	// Observer calls, inter-looper and inter-team
			status_t			StartWatching(BMessenger target, uint32 what);
			status_t			StartWatchingAll(BMessenger target);
			status_t			StopWatching(BMessenger target, uint32 what);
			status_t			StopWatchingAll(BMessenger target);

	// Observer calls for observing targets in the local team
			status_t			StartWatching(BHandler* observer, uint32 what);
			status_t			StartWatchingAll(BHandler* observer);
			status_t			StopWatching(BHandler* observer, uint32 what);
			status_t			StopWatchingAll(BHandler* observer);

	// Reserved
	virtual	status_t			Perform(perform_code d, void* arg);

	// Notifier calls
	virtual	void				SendNotices(uint32 what,
									const BMessage* notice = NULL);
			bool				IsWatched() const;

private:
	typedef BArchivable _inherited;
	friend inline int32 _get_object_token_(const BHandler* );
	friend class BLooper;
	friend class BMessageFilter;

	// Reserved virtual slots kept for binary compatibility.
	virtual	void				_ReservedHandler2();
	virtual	void				_ReservedHandler3();
	virtual	void				_ReservedHandler4();

			void				_InitData(const char* name);
			BPrivate::ObserverList*	_ObserverList();

	// Not copyable: copy constructor and assignment are private and unimplemented.
								BHandler(const BHandler&);
			BHandler&			operator=(const BHandler&);

			void				SetLooper(BLooper* looper);

			int32				fToken;
			char*				fName;
			BLooper*			fLooper;
			BHandler*			fNextHandler;
			BList*				fFilters;
			BPrivate::ObserverList*	fObserverList;
			uint32				_reserved[3];
};
#endif // _HANDLER_H
| 1,170 |
879 | <filename>header/src/main/java/org/zstack/header/storage/backup/ExportImageFromBackupStorageMsg.java<gh_stars>100-1000
package org.zstack.header.storage.backup;
import org.zstack.header.message.NeedReplyMessage;
/**
* Created by mingjian.deng on 17/2/21.
*/
/**
 * Request message asking that an image be exported from a backup storage.
 * <p>
 * Carries the uuid of the backup storage and of the image, the raw path of
 * the image on the storage, and the requested export format. A reply is
 * expected (extends {@code NeedReplyMessage}).
 */
public class ExportImageFromBackupStorageMsg extends NeedReplyMessage implements BackupStorageMessage {
    private String backupStorageUuid;
    private String imageUuid;
    private String rawPath;
    private String exportFormat;

    /** Uuid of the backup storage holding the image. */
    @Override
    public String getBackupStorageUuid() {
        return backupStorageUuid;
    }

    public void setBackupStorageUuid(String backupStorageUuid) {
        this.backupStorageUuid = backupStorageUuid;
    }

    /** Uuid of the image to export. */
    public String getImageUuid() {
        return imageUuid;
    }

    public void setImageUuid(String imageUuid) {
        this.imageUuid = imageUuid;
    }

    /** Raw path of the image on the backup storage. */
    public String getRawPath() {
        return rawPath;
    }

    public void setRawPath(String rawPath) {
        this.rawPath = rawPath;
    }

    /** Desired format of the exported image. */
    public String getExportFormat() {
        return exportFormat;
    }

    public void setExportFormat(String exportFormat) {
        this.exportFormat = exportFormat;
    }
}
| 436 |
815 | <filename>test/t/src/lily_backbone_bindings.h
#ifndef LILY_BACKBONE_BINDINGS_H
#define LILY_BACKBONE_BINDINGS_H
/* Generated by lily-bindgen, do not edit. */
/* On Windows the symbols below must be exported from the DLL unless the
 * embedder explicitly opts out with LILY_NO_EXPORT. */
#if defined(_WIN32) && !defined(LILY_NO_EXPORT)
#define LILY_BACKBONE_EXPORT __declspec(dllexport)
#else
#define LILY_BACKBONE_EXPORT
#endif
/* Accessors for the Interpreter class: slot 0 = import_hook, slot 1 = raw. */
#define GET_Interpreter__import_hook(c_) \
lily_con_get(c_, 0)
#define SET_Interpreter__import_hook(c_, v_) \
lily_con_set(c_, 0, v_)
#define SETFS_Interpreter__import_hook(state, c_) \
lily_con_set_from_stack(state, c_, 0)
#define GET_Interpreter__raw(c_) \
lily_con_get(c_, 1)
#define SET_Interpreter__raw(c_, v_) \
lily_con_set(c_, 1, v_)
#define SETFS_Interpreter__raw(state, c_) \
lily_con_set_from_stack(state, c_, 1)
#define ID_Interpreter(s_) \
lily_cid_at(s_, 0)
#define SUPER_Interpreter(s_) \
lily_push_super(s_, ID_Interpreter(s_), 2)
/* RawInterpreter is a foreign (C-backed) class; these wrap arg access,
 * casting, class-id lookup and construction. */
#define ARG_RawInterpreter(s_, i_) \
(lily_backbone_RawInterpreter *)lily_arg_generic(s_, i_)
#define AS_RawInterpreter(v_) \
(lily_backbone_RawInterpreter *)lily_as_generic(v_)
#define ID_RawInterpreter(s_) \
lily_cid_at(s_, 1)
#define INIT_RawInterpreter(s_) \
(lily_backbone_RawInterpreter *)lily_push_foreign(s_, ID_RawInterpreter(s_), (lily_destroy_func)destroy_RawInterpreter, sizeof(lily_backbone_RawInterpreter))
/* Accessors for TestCaseBase: slot 0 = fail_count, slot 1 = pass_count,
 * slot 2 = skip_count. */
#define GET_TestCaseBase__fail_count(c_) \
lily_con_get(c_, 0)
#define SET_TestCaseBase__fail_count(c_, v_) \
lily_con_set(c_, 0, v_)
#define SETFS_TestCaseBase__fail_count(state, c_) \
lily_con_set_from_stack(state, c_, 0)
#define GET_TestCaseBase__pass_count(c_) \
lily_con_get(c_, 1)
#define SET_TestCaseBase__pass_count(c_, v_) \
lily_con_set(c_, 1, v_)
#define SETFS_TestCaseBase__pass_count(state, c_) \
lily_con_set_from_stack(state, c_, 1)
#define GET_TestCaseBase__skip_count(c_) \
lily_con_get(c_, 2)
#define SET_TestCaseBase__skip_count(c_, v_) \
lily_con_set(c_, 2, v_)
#define SETFS_TestCaseBase__skip_count(state, c_) \
lily_con_set_from_stack(state, c_, 2)
#define ID_TestCaseBase(s_) \
lily_cid_at(s_, 2)
#define SUPER_TestCaseBase(s_) \
lily_push_super(s_, ID_TestCaseBase(s_), 3)
/* Declaration table consumed by the Lily interpreter: class names followed by
 * their methods and properties in schema order. */
LILY_BACKBONE_EXPORT
const char *lily_backbone_info_table[] = {
    "\03Interpreter\0RawInterpreter\0TestCaseBase\0"
    ,"N\032Interpreter\0"
    ,"m\0<new>\0: Interpreter"
    ,"m\0config_set_extra_info\0(Interpreter,Boolean): Interpreter"
    ,"m\0error\0(Interpreter): String"
    ,"m\0error_message\0(Interpreter): String"
    ,"m\0exit_code\0(Interpreter): Byte"
    ,"m\0has_exited\0(Interpreter): Boolean"
    ,"m\0import_current_root_dir\0(Interpreter): String"
    ,"m\0import_file\0(Interpreter,String): Boolean"
    ,"m\0import_hook_reset\0(Interpreter)"
    ,"m\0import_hook_set\0(Interpreter,Function(Interpreter,String))"
    ,"m\0import_library\0(Interpreter,String): Boolean"
    ,"m\0import_string\0(Interpreter,String,String): Boolean"
    ,"m\0import_use_local_dir\0(Interpreter,String)"
    ,"m\0import_use_package_dir\0(Interpreter,String)"
    ,"m\0parse_expr\0(Interpreter,String,String): Option[String]"
    ,"m\0parse_file\0(Interpreter,String): Boolean"
    ,"m\0parse_manifest_file\0(Interpreter,String): Boolean"
    ,"m\0parse_manifest_string\0(Interpreter,String,String): Boolean"
    ,"m\0parse_string\0(Interpreter,String,String): Boolean"
    ,"m\0render_file\0(Interpreter,String): Option[String]"
    ,"m\0render_string\0(Interpreter,String,String): Option[String]"
    ,"m\0set_hook\0(Interpreter,Function(Interpreter,String))"
    ,"m\0validate_file\0(Interpreter,String): Boolean"
    ,"m\0validate_string\0(Interpreter,String,String): Boolean"
    ,"1\0import_hook\0Function(Interpreter,String)"
    ,"1\0raw\0RawInterpreter"
    ,"C\0RawInterpreter\0"
    ,"N\05TestCaseBase\0"
    ,"m\0<new>\0: TestCaseBase"
    ,"m\0run_tests\0(TestCaseBase)"
    ,"3\0fail_count\0Integer"
    ,"3\0pass_count\0Integer"
    ,"3\0skip_count\0Integer"
    ,"Z"
};
/* Dispatch table: entries line up one-to-one with the info table rows above
 * (NULL for non-callable rows such as properties and the foreign class). */
#define LILY_DECLARE_BACKBONE_CALL_TABLE \
LILY_BACKBONE_EXPORT \
lily_call_entry_func lily_backbone_call_table[] = { \
    NULL, \
    NULL, \
    lily_backbone_Interpreter_new, \
    lily_backbone_Interpreter_config_set_extra_info, \
    lily_backbone_Interpreter_error, \
    lily_backbone_Interpreter_error_message, \
    lily_backbone_Interpreter_exit_code, \
    lily_backbone_Interpreter_has_exited, \
    lily_backbone_Interpreter_import_current_root_dir, \
    lily_backbone_Interpreter_import_file, \
    lily_backbone_Interpreter_import_hook_reset, \
    lily_backbone_Interpreter_import_hook_set, \
    lily_backbone_Interpreter_import_library, \
    lily_backbone_Interpreter_import_string, \
    lily_backbone_Interpreter_import_use_local_dir, \
    lily_backbone_Interpreter_import_use_package_dir, \
    lily_backbone_Interpreter_parse_expr, \
    lily_backbone_Interpreter_parse_file, \
    lily_backbone_Interpreter_parse_manifest_file, \
    lily_backbone_Interpreter_parse_manifest_string, \
    lily_backbone_Interpreter_parse_string, \
    lily_backbone_Interpreter_render_file, \
    lily_backbone_Interpreter_render_string, \
    lily_backbone_Interpreter_set_hook, \
    lily_backbone_Interpreter_validate_file, \
    lily_backbone_Interpreter_validate_string, \
    NULL, \
    NULL, \
    NULL, \
    NULL, \
    lily_backbone_TestCaseBase_new, \
    lily_backbone_TestCaseBase_run_tests, \
    NULL, \
    NULL, \
    NULL, \
};
#endif
| 2,379 |
1,768 | // Copyright (c) 2003 Compaq Corporation. All rights reserved.
package tla2sany;
/**
 * SANY is a thin shell class: it exists only to forward the command line to
 * the real driver entry point, {@code tla2sany.drivers.SANY#SANYmain}.
 */
public class SANY {
  /**
   * Delegates directly to the driver; all argument parsing and processing
   * happens there.
   */
  public static final void main(String[] args) {
    tla2sany.drivers.SANY.SANYmain(args);
  }
}
| 96 |
# Load the Celery application when the package is imported so that task
# decorators bind to it (standard Celery/Django integration pattern).
from .celery import app as celery_app  # noqa

# Points Django at the AppConfig subclass for this package (legacy
# default_app_config mechanism; ignored by Django 3.2+).
default_app_config = "boltstream.apps.BoltstreamAppConfig"
| 36 |
313 | <gh_stars>100-1000
/*
* Copyright 2018 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.titus.ext.cassandra.tool;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.QueryOptions;
import com.datastax.driver.core.Session;
import com.google.common.base.Preconditions;
import com.netflix.titus.common.util.StringExt;
import com.netflix.titus.ext.cassandra.executor.AsyncCassandraExecutor;
import org.apache.commons.cli.CommandLine;
public class DefaultCommandContext implements CommandContext {
private static final int PAGE_SIZE = 1000;
private static final int SPLIT = 2;
private final CommandLine commandLine;
private final Session defaultSession;
private final Optional<CassSession> sourceSession;
private final Optional<CassSession> targetSession;
public DefaultCommandContext(CommandLine commandLine,
Session defaultSession,
Function<String, Session> sourceSessionFactory,
Function<String, Session> targetSessionFactory) {
this.commandLine = commandLine;
this.defaultSession = defaultSession;
this.sourceSession = commandLine.hasOption('s')
? Optional.of(new CassSession(sourceSessionFactory, commandLine.getOptionValue('s')))
: Optional.empty();
this.targetSession = commandLine.hasOption('t')
? Optional.of(new CassSession(targetSessionFactory, commandLine.getOptionValue('t')))
: Optional.empty();
}
@Override
public void shutdown() {
}
@Override
public CommandLine getCommandLine() {
return commandLine;
}
@Override
public Session getDefaultSession() {
return defaultSession;
}
@Override
public Session getSourceSession() {
Preconditions.checkState(sourceSession.isPresent(), "Cassandra source keyspace not defined");
return sourceSession.get().getOrCreateSession();
}
@Override
public Session getTargetSession() {
Preconditions.checkState(targetSession.isPresent(), "Cassandra target keyspace not defined");
return targetSession.get().getOrCreateSession();
}
@Override
public String getSourceKeySpace() {
Preconditions.checkState(sourceSession.isPresent(), "Cassandra source keyspace not defined");
return sourceSession.get().getKeySpace();
}
@Override
public String getTargetKeySpace() {
Preconditions.checkState(targetSession.isPresent(), "Cassandra target keyspace not defined");
return targetSession.get().getKeySpace();
}
@Override
public AsyncCassandraExecutor getSourceCassandraExecutor() {
Preconditions.checkState(sourceSession.isPresent(), "Cassandra source keyspace not defined");
return sourceSession.get().getOrCreateExecutor();
}
@Override
public AsyncCassandraExecutor getTargetCassandraExecutor() {
Preconditions.checkState(targetSession.isPresent(), "Cassandra target keyspace not defined");
return targetSession.get().getOrCreateExecutor();
}
public static CommandContext newCommandContext(CommandLine commandLine) {
List<String> ips = StringExt.splitByComma(commandLine.getOptionValue("H"));
int sourcePort = Integer.parseInt(commandLine.getOptionValue("p"));
QueryOptions queryOptions = new QueryOptions()
.setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM);
Cluster cluster = Cluster.builder()
.addContactPoints((String[]) ips.toArray())
.withPort(sourcePort)
.withQueryOptions(queryOptions)
.build();
return new DefaultCommandContext(
commandLine,
cluster.newSession(),
sourceKeySpace -> cluster.connect('"' + sourceKeySpace + '"'),
targetKeySpace -> cluster.connect('"' + targetKeySpace + '"')
) {
@Override
public void shutdown() {
cluster.close();
}
};
}
class CassSession {
private final Function<String, Session> sessionFactory;
private final String keySpace;
private Session session;
private AsyncCassandraExecutor executor;
CassSession(Function<String, Session> sessionFactory, String keySpace) {
this.sessionFactory = sessionFactory;
this.keySpace = keySpace;
}
String getKeySpace() {
return keySpace;
}
Session getOrCreateSession() {
if (session == null) {
session = sessionFactory.apply(keySpace);
}
return session;
}
AsyncCassandraExecutor getOrCreateExecutor() {
if (executor == null) {
executor = createCassExecutor(getOrCreateSession());
}
return executor;
}
private AsyncCassandraExecutor createCassExecutor(Session session) {
return new AsyncCassandraExecutor(session, PAGE_SIZE, SPLIT);
}
}
}
| 2,203 |
617 | #!/usr/bin/env python
'''Wrapper for python2 and python3 around compileall to raise exception
when a python byte code generation failed.
Inspired from:
http://stackoverflow.com/questions/615632/how-to-detect-errors-from-compileall-compile-dir
'''
from __future__ import print_function
import sys
import py_compile
import compileall
import argparse
def check_for_errors(comparison):
    '''Decorate a rich-comparison method so a pending PyCompileError aborts.

    If a py_compile.PyCompileError is currently being handled when the
    comparison runs, report the failing file and re-raise the error to stop
    execution; otherwise behave exactly like the wrapped comparison.
    '''
    def _guarded(self, other):
        pending = sys.exc_info()
        exc_class, exc_value = pending[0], pending[1]
        if exc_class is not None and issubclass(exc_class,
                                                py_compile.PyCompileError):
            print("Cannot compile %s" % exc_value.file)
            raise exc_value
        return comparison(self, other)
    return _guarded
class ReportProblem(int):
    '''Class that pretends to be an int() object but implements all of its
    comparison operators such that it'd detect being called in
    PyCompileError handling context and abort execution
    '''

    # The integer value this object masquerades as. compileall compares its
    # "quiet" argument, which triggers the wrapped operators below.
    VALUE = 1

    def __new__(cls, *args, **kwargs):
        return int.__new__(cls, ReportProblem.VALUE, **kwargs)

    # BUG FIX: defining __eq__ sets __hash__ to None in Python 3, making
    # instances unhashable. Explicitly keep int's hashing behavior.
    __hash__ = int.__hash__

    @check_for_errors
    def __lt__(self, other):
        return ReportProblem.VALUE < other

    @check_for_errors
    def __eq__(self, other):
        return ReportProblem.VALUE == other

    def __ge__(self, other):
        # Derived from __lt__ so the PyCompileError check also fires here.
        return not self < other

    def __gt__(self, other):
        return not self < other and not self == other

    def __ne__(self, other):
        return not self == other
# Command-line entry point: byte-compile every Python source file under
# DIRECTORY. Passing a ReportProblem instance as compileall's "quiet"
# argument makes every comparison of that flag check for a pending
# PyCompileError and re-raise it (see check_for_errors above), so a failed
# compilation aborts instead of being silently reported.
parser = argparse.ArgumentParser(description='Compile Python source files in a directory tree.')
parser.add_argument("target", metavar='DIRECTORY',
                    help='Directory to scan')
parser.add_argument("--force", action='store_true',
                    # typo fix: "alread" -> "already"
                    help="Force compilation even if already compiled")
args = parser.parse_args()
compileall.compile_dir(args.target, force=args.force, quiet=ReportProblem())
| 776 |
/* Bake project template: ${id upper} / ${id dash} are replaced with the
 * project's identifier when the template is instantiated. */
#ifndef ${id upper}_H
#define ${id upper}_H

/* This generated file contains includes for project dependencies */
#include "${id dash}/bake_config.h"

/* extern "C" guard so the header is usable from both C and C++. */
#ifdef __cplusplus
extern "C" {
#endif

#ifdef __cplusplus
}
#endif

#endif
| 85 |
507 | <gh_stars>100-1000
# terrascript/resource/hashicorp/azurestack.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:13:15 UTC)
import terrascript
# One subclass of terrascript.Resource per Terraform azurestack resource type;
# the class name doubles as the Terraform resource identifier, so the bodies
# are intentionally empty.
class azurestack_availability_set(terrascript.Resource):
    pass


class azurestack_dns_a_record(terrascript.Resource):
    pass


class azurestack_dns_zone(terrascript.Resource):
    pass


class azurestack_lb(terrascript.Resource):
    pass


class azurestack_lb_backend_address_pool(terrascript.Resource):
    pass


class azurestack_lb_nat_pool(terrascript.Resource):
    pass


class azurestack_lb_nat_rule(terrascript.Resource):
    pass


class azurestack_lb_probe(terrascript.Resource):
    pass


class azurestack_lb_rule(terrascript.Resource):
    pass


class azurestack_local_network_gateway(terrascript.Resource):
    pass


class azurestack_managed_disk(terrascript.Resource):
    pass


class azurestack_network_interface(terrascript.Resource):
    pass


class azurestack_network_security_group(terrascript.Resource):
    pass


class azurestack_network_security_rule(terrascript.Resource):
    pass


class azurestack_public_ip(terrascript.Resource):
    pass


class azurestack_resource_group(terrascript.Resource):
    pass


class azurestack_route(terrascript.Resource):
    pass


class azurestack_route_table(terrascript.Resource):
    pass


class azurestack_storage_account(terrascript.Resource):
    pass


class azurestack_storage_blob(terrascript.Resource):
    pass


class azurestack_storage_container(terrascript.Resource):
    pass


class azurestack_subnet(terrascript.Resource):
    pass


class azurestack_template_deployment(terrascript.Resource):
    pass


class azurestack_virtual_machine(terrascript.Resource):
    pass


class azurestack_virtual_machine_extension(terrascript.Resource):
    pass


class azurestack_virtual_machine_scale_set(terrascript.Resource):
    pass


class azurestack_virtual_network(terrascript.Resource):
    pass


class azurestack_virtual_network_gateway(terrascript.Resource):
    pass


class azurestack_virtual_network_gateway_connection(terrascript.Resource):
    pass


# Public re-export list; kept in sync with the classes above by the generator.
__all__ = [
    "azurestack_availability_set",
    "azurestack_dns_a_record",
    "azurestack_dns_zone",
    "azurestack_lb",
    "azurestack_lb_backend_address_pool",
    "azurestack_lb_nat_pool",
    "azurestack_lb_nat_rule",
    "azurestack_lb_probe",
    "azurestack_lb_rule",
    "azurestack_local_network_gateway",
    "azurestack_managed_disk",
    "azurestack_network_interface",
    "azurestack_network_security_group",
    "azurestack_network_security_rule",
    "azurestack_public_ip",
    "azurestack_resource_group",
    "azurestack_route",
    "azurestack_route_table",
    "azurestack_storage_account",
    "azurestack_storage_blob",
    "azurestack_storage_container",
    "azurestack_subnet",
    "azurestack_template_deployment",
    "azurestack_virtual_machine",
    "azurestack_virtual_machine_extension",
    "azurestack_virtual_machine_scale_set",
    "azurestack_virtual_network",
    "azurestack_virtual_network_gateway",
    "azurestack_virtual_network_gateway_connection",
]
| 1,147 |
634 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions.runAnything.activity;
import com.intellij.openapi.actionSystem.DataContext;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class RunAnythingCommandExecutionProvider extends RunAnythingCommandProvider {
  /**
   * Treats the raw "Run Anything" input as the command itself: every pattern
   * matches verbatim.
   */
  @Nullable
  @Override
  public String findMatchingValue(@Nonnull DataContext dataContext, @Nonnull String pattern) {
    return pattern;
  }

  /** No help group is associated with ad-hoc command execution. */
  @Nullable
  @Override
  public String getHelpGroupTitle() {
    return null;
  }
}
308 | <reponame>yusufcakal/Modular-Architecture-Hexagonal-Demo-Project<filename>payment-api/infra/src/main/java/com/hexagonaldemo/paymentapi/adapters/balance/rest/BalanceController.java
package com.hexagonaldemo.paymentapi.adapters.balance.rest;
import com.hexagonaldemo.paymentapi.adapters.balance.rest.dto.BalanceResponse;
import com.hexagonaldemo.paymentapi.adapters.balance.rest.dto.BalanceTransactionCreateRequest;
import com.hexagonaldemo.paymentapi.balance.usecase.BalanceRetrieve;
import com.hexagonaldemo.paymentapi.balance.usecase.BalanceTransactionCreate;
import com.hexagonaldemo.paymentapi.balance.model.Balance;
import com.hexagonaldemo.paymentapi.common.usecase.UseCaseHandler;
import com.hexagonaldemo.paymentapi.common.rest.BaseController;
import com.hexagonaldemo.paymentapi.common.rest.Response;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
@Slf4j
@RestController
@RequiredArgsConstructor
@RequestMapping("/api/v1/balances")
public class BalanceController extends BaseController {

    /**
     * Returns the current balance of the given account.
     *
     * @param accountId id of the account whose balance is requested
     * @return the account's balance wrapped in the common response envelope
     */
    @GetMapping
    @ResponseStatus(HttpStatus.OK)
    public Response<BalanceResponse> retrieve(@RequestParam("accountId") Long accountId) {
        // publish() dispatches the use case through the inherited handler machinery
        var balance = publish(Balance.class, BalanceRetrieve.from(accountId));
        log.info("Balance is retrieved for account {} as {}", accountId, balance);
        return respond(BalanceResponse.fromModel(balance));
    }

    /**
     * Creates a balance transaction and returns the balance that results
     * from applying it.
     *
     * @param balanceTransactionCreateRequest validated transaction payload
     * @return the updated balance wrapped in the common response envelope
     */
    @PostMapping
    @ResponseStatus(HttpStatus.OK)
    public Response<BalanceResponse> addBalanceTransaction(@Valid @RequestBody BalanceTransactionCreateRequest balanceTransactionCreateRequest) {
        var balance = publish(Balance.class, balanceTransactionCreateRequest.toUseCase());
        log.info("Balance transaction is created as {} and balance became {}", balanceTransactionCreateRequest, balance);
        return respond(BalanceResponse.fromModel(balance));
    }
}
461 | import java.util.HashMap;
import java.util.Map;
public class Solution4 {

    /**
     * LeetCode 447 "Number of Boomerangs": counts ordered triples (i, j, k)
     * where points[j] and points[k] are equidistant from points[i].
     *
     * For each candidate center we count, per squared distance, how many
     * points were already seen at that distance: the m-th such point forms
     * (m - 1) new unordered pairs with the earlier ones. Multiplying the
     * total by 2 accounts for the two orderings (j, k) and (k, j).
     *
     * @param points array of 2D points, each {x, y}
     * @return the number of boomerang triples (0 for an empty array)
     */
    public int numberOfBoomerangs(int[][] points) {
        int pairs = 0;
        for (int[] center : points) {
            // squared distance -> points already seen at that distance from center
            Map<Integer, Integer> countByDistance = new HashMap<>();
            for (int[] other : points) {
                // The center itself contributes a single distance-0 entry,
                // which never pairs with anything as long as points are distinct.
                int d = distance(center, other);
                // getOrDefault replaces the original containsKey+get double lookup
                int seen = countByDistance.getOrDefault(d, 0);
                pairs += seen;
                countByDistance.put(d, seen + 1);
            }
        }
        // each unordered pair yields two ordered boomerangs
        return pairs * 2;
    }

    /**
     * Squared Euclidean distance between two points; squared values compare
     * equal exactly when the true distances do, so sqrt is unnecessary.
     */
    private int distance(int[] point1, int[] point2) {
        int diffX = point1[0] - point2[0];
        int diffY = point1[1] - point2[1];
        return diffX * diffX + diffY * diffY;
    }
}
563 | // Automatically generated by the Fast Binary Encoding compiler, do not modify!
// https://github.com/chronoxor/FastBinaryEncoding
// Source: test.fbe
// Version: 1.8.0.0
package com.chronoxor.test;
// Generated FBE data struct: one public field per schema field f1..f44 plus
// an id. Note: ordering, equality and hashing are based on id ONLY (see
// compareTo/equals/hashCode below) — the fN fields do not participate.
public class StructSimple implements Comparable<Object>
{
    public int id = 0;
    public boolean f1 = false;
    public boolean f2 = true;
    public byte f3 = (byte)0;
    public byte f4 = (byte)0xFF;
    public char f5 = '\0';
    public char f6 = (char)'!';
    public char f7 = '\0';
    public char f8 = (char)0x0444;
    public byte f9 = (byte)0;
    public byte f10 = (byte)127;
    public byte f11 = (byte)0;
    public byte f12 = (byte)0xFF;
    public short f13 = (short)0;
    public short f14 = (short)32767;
    public short f15 = (short)0;
    public short f16 = (short)0xFFFF;
    public int f17 = 0;
    public int f18 = (int)2147483647;
    public int f19 = (int)0;
    public int f20 = (int)0xFFFFFFFF;
    public long f21 = 0L;
    public long f22 = (long)9223372036854775807L;
    public long f23 = (long)0L;
    public long f24 = (long)0xFFFFFFFFFFFFFFFFL;
    public float f25 = 0.0f;
    public float f26 = (float)123.456f;
    public double f27 = 0.0d;
    public double f28 = (double)-123.456e+123d;
    public java.math.BigDecimal f29 = java.math.BigDecimal.valueOf(0L);
    public java.math.BigDecimal f30 = java.math.BigDecimal.valueOf(123456.123456d);
    public String f31 = "";
    public String f32 = "Initial string!";
    public java.time.Instant f33 = java.time.Instant.EPOCH;
    public java.time.Instant f34 = java.time.Instant.EPOCH;
    // f35 defaults to the construction time, so two freshly built instances differ here.
    public java.time.Instant f35 = java.time.Instant.now();
    public java.util.UUID f36 = com.chronoxor.fbe.UUIDGenerator.nil();
    public java.util.UUID f37 = com.chronoxor.fbe.UUIDGenerator.sequential();
    public java.util.UUID f38 = java.util.UUID.fromString("123e4567-e89b-12d3-a456-426655440000");
    public com.chronoxor.proto.OrderSide f39 = new com.chronoxor.proto.OrderSide();
    public com.chronoxor.proto.OrderType f40 = new com.chronoxor.proto.OrderType();
    public com.chronoxor.proto.Order f41 = new com.chronoxor.proto.Order();
    public com.chronoxor.proto.Balance f42 = new com.chronoxor.proto.Balance();
    public com.chronoxor.proto.State f43 = new com.chronoxor.proto.State();
    public com.chronoxor.proto.Account f44 = new com.chronoxor.proto.Account();

    // FBE type identifier assigned to this struct by the schema compiler.
    public static final long fbeTypeConst = 110;
    public long fbeType() { return fbeTypeConst; }

    public StructSimple() {}
    // All-fields constructor in declaration order (generated).
    public StructSimple(int id, boolean f1, boolean f2, byte f3, byte f4, char f5, char f6, char f7, char f8, byte f9, byte f10, byte f11, byte f12, short f13, short f14, short f15, short f16, int f17, int f18, int f19, int f20, long f21, long f22, long f23, long f24, float f25, float f26, double f27, double f28, java.math.BigDecimal f29, java.math.BigDecimal f30, String f31, String f32, java.time.Instant f33, java.time.Instant f34, java.time.Instant f35, java.util.UUID f36, java.util.UUID f37, java.util.UUID f38, com.chronoxor.proto.OrderSide f39, com.chronoxor.proto.OrderType f40, com.chronoxor.proto.Order f41, com.chronoxor.proto.Balance f42, com.chronoxor.proto.State f43, com.chronoxor.proto.Account f44)
    {
        this.id = id;
        this.f1 = f1;
        this.f2 = f2;
        this.f3 = f3;
        this.f4 = f4;
        this.f5 = f5;
        this.f6 = f6;
        this.f7 = f7;
        this.f8 = f8;
        this.f9 = f9;
        this.f10 = f10;
        this.f11 = f11;
        this.f12 = f12;
        this.f13 = f13;
        this.f14 = f14;
        this.f15 = f15;
        this.f16 = f16;
        this.f17 = f17;
        this.f18 = f18;
        this.f19 = f19;
        this.f20 = f20;
        this.f21 = f21;
        this.f22 = f22;
        this.f23 = f23;
        this.f24 = f24;
        this.f25 = f25;
        this.f26 = f26;
        this.f27 = f27;
        this.f28 = f28;
        this.f29 = f29;
        this.f30 = f30;
        this.f31 = f31;
        this.f32 = f32;
        this.f33 = f33;
        this.f34 = f34;
        this.f35 = f35;
        this.f36 = f36;
        this.f37 = f37;
        this.f38 = f38;
        this.f39 = f39;
        this.f40 = f40;
        this.f41 = f41;
        this.f42 = f42;
        this.f43 = f43;
        this.f44 = f44;
    }

    // Shallow copy constructor: field references (e.g. f41..f44) are shared.
    public StructSimple(StructSimple other)
    {
        this.id = other.id;
        this.f1 = other.f1;
        this.f2 = other.f2;
        this.f3 = other.f3;
        this.f4 = other.f4;
        this.f5 = other.f5;
        this.f6 = other.f6;
        this.f7 = other.f7;
        this.f8 = other.f8;
        this.f9 = other.f9;
        this.f10 = other.f10;
        this.f11 = other.f11;
        this.f12 = other.f12;
        this.f13 = other.f13;
        this.f14 = other.f14;
        this.f15 = other.f15;
        this.f16 = other.f16;
        this.f17 = other.f17;
        this.f18 = other.f18;
        this.f19 = other.f19;
        this.f20 = other.f20;
        this.f21 = other.f21;
        this.f22 = other.f22;
        this.f23 = other.f23;
        this.f24 = other.f24;
        this.f25 = other.f25;
        this.f26 = other.f26;
        this.f27 = other.f27;
        this.f28 = other.f28;
        this.f29 = other.f29;
        this.f30 = other.f30;
        this.f31 = other.f31;
        this.f32 = other.f32;
        this.f33 = other.f33;
        this.f34 = other.f34;
        this.f35 = other.f35;
        this.f36 = other.f36;
        this.f37 = other.f37;
        this.f38 = other.f38;
        this.f39 = other.f39;
        this.f40 = other.f40;
        this.f41 = other.f41;
        this.f42 = other.f42;
        this.f43 = other.f43;
        this.f44 = other.f44;
    }

    // Deep copy via an FBE serialize/deserialize round trip (does not use
    // Object.clone()/Cloneable).
    public StructSimple clone()
    {
        // Serialize the struct to the FBE stream
        var writer = new com.chronoxor.test.fbe.StructSimpleModel();
        writer.serialize(this);

        // Deserialize the struct from the FBE stream
        var reader = new com.chronoxor.test.fbe.StructSimpleModel();
        reader.attach(writer.getBuffer());
        return reader.deserialize();
    }

    // Orders by id only; non-StructSimple (and null) arguments sort as -1.
    @Override
    public int compareTo(Object other)
    {
        if (other == null)
            return -1;

        if (!StructSimple.class.isAssignableFrom(other.getClass()))
            return -1;

        final StructSimple obj = (StructSimple)other;

        int result = 0;
        result = Integer.compare(id, obj.id);
        if (result != 0)
            return result;
        return result;
    }

    // Equality is by id only, consistent with hashCode below.
    @Override
    public boolean equals(Object other)
    {
        if (other == null)
            return false;

        if (!StructSimple.class.isAssignableFrom(other.getClass()))
            return false;

        final StructSimple obj = (StructSimple)other;

        if (id != obj.id)
            return false;
        return true;
    }

    @Override
    public int hashCode()
    {
        int hash = 17;
        hash = hash * 31 + Integer.hashCode(id);
        return hash;
    }

    // Human-readable dump of every field; Instants are printed as epoch nanoseconds.
    @Override
    public String toString()
    {
        var sb = new StringBuilder();
        sb.append("StructSimple(");
        sb.append("id="); sb.append(id);
        sb.append(",f1="); sb.append(f1 ? "true" : "false");
        sb.append(",f2="); sb.append(f2 ? "true" : "false");
        sb.append(",f3="); sb.append(f3);
        sb.append(",f4="); sb.append(f4);
        sb.append(",f5="); sb.append("'").append(f5).append("'");
        sb.append(",f6="); sb.append("'").append(f6).append("'");
        sb.append(",f7="); sb.append("'").append(f7).append("'");
        sb.append(",f8="); sb.append("'").append(f8).append("'");
        sb.append(",f9="); sb.append(f9);
        sb.append(",f10="); sb.append(f10);
        sb.append(",f11="); sb.append(f11);
        sb.append(",f12="); sb.append(f12);
        sb.append(",f13="); sb.append(f13);
        sb.append(",f14="); sb.append(f14);
        sb.append(",f15="); sb.append(f15);
        sb.append(",f16="); sb.append(f16);
        sb.append(",f17="); sb.append(f17);
        sb.append(",f18="); sb.append(f18);
        sb.append(",f19="); sb.append(f19);
        sb.append(",f20="); sb.append(f20);
        sb.append(",f21="); sb.append(f21);
        sb.append(",f22="); sb.append(f22);
        sb.append(",f23="); sb.append(f23);
        sb.append(",f24="); sb.append(f24);
        sb.append(",f25="); sb.append(f25);
        sb.append(",f26="); sb.append(f26);
        sb.append(",f27="); sb.append(f27);
        sb.append(",f28="); sb.append(f28);
        sb.append(",f29="); if (f29 != null) sb.append(f29); else sb.append("null");
        sb.append(",f30="); if (f30 != null) sb.append(f30); else sb.append("null");
        sb.append(",f31="); if (f31 != null) sb.append("\"").append(f31).append("\""); else sb.append("null");
        sb.append(",f32="); if (f32 != null) sb.append("\"").append(f32).append("\""); else sb.append("null");
        sb.append(",f33="); if (f33 != null) sb.append(f33.getEpochSecond() * 1000000000 + f33.getNano()); else sb.append("null");
        sb.append(",f34="); if (f34 != null) sb.append(f34.getEpochSecond() * 1000000000 + f34.getNano()); else sb.append("null");
        sb.append(",f35="); if (f35 != null) sb.append(f35.getEpochSecond() * 1000000000 + f35.getNano()); else sb.append("null");
        sb.append(",f36="); if (f36 != null) sb.append("\"").append(f36).append("\""); else sb.append("null");
        sb.append(",f37="); if (f37 != null) sb.append("\"").append(f37).append("\""); else sb.append("null");
        sb.append(",f38="); if (f38 != null) sb.append("\"").append(f38).append("\""); else sb.append("null");
        sb.append(",f39="); sb.append(f39);
        sb.append(",f40="); sb.append(f40);
        sb.append(",f41="); sb.append(f41);
        sb.append(",f42="); sb.append(f42);
        sb.append(",f43="); sb.append(f43);
        sb.append(",f44="); sb.append(f44);
        sb.append(")");
        return sb.toString();
    }

    // JSON round trip through the generated engine.
    public String toJson() { return com.chronoxor.test.fbe.Json.getEngine().toJson(this); }
    public static StructSimple fromJson(String json) { return com.chronoxor.test.fbe.Json.getEngine().fromJson(json, StructSimple.class); }
}
| 5,040 |
320 | <reponame>yvesjores/AndroidSensorsProgramming<filename>app/src/root/gast/playground/sensor/SensorDisplayFragment.java
/*
* Copyright 2012 <NAME> and <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package root.gast.playground.sensor;
import root.gast.playground.BuildConfig;
import root.gast.playground.R;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.TextView;
/**
* Displays the details of a sensor.
*
* @author <NAME> <<a href="mailto:<EMAIL>"><EMAIL></a>>
* @author <NAME> <<a href="mailto:<EMAIL>"><EMAIL></a>>
*/
public class SensorDisplayFragment extends Fragment implements SensorEventListener
{
private static final String TAG = "SensorDisplayFragment";
private static final String THETA = "\u0398";
private static final String ACCELERATION_UNITS = "m/s\u00B2";
private SensorManager sensorManager;
private Sensor sensor;
private TextView name;
private TextView type;
private TextView maxRange;
private TextView minDelay;
private TextView power;
private TextView resolution;
private TextView vendor;
private TextView version;
private TextView accuracy;
private TextView timestampLabel;
private TextView timestamp;
private TextView timestampUnits;
private TextView dataLabel;
private TextView dataUnits;
private TextView xAxis;
private TextView xAxisLabel;
private TextView yAxis;
private TextView yAxisLabel;
private TextView zAxis;
private TextView zAxisLabel;
private TextView singleValue;
private TextView cosLabel;
private TextView cos;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState)
{
View layout = inflater.inflate(R.layout.sensor_view, null);
sensorManager =
(SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE);
name = (TextView) layout.findViewById(R.id.name);
type = (TextView) layout.findViewById(R.id.type);
maxRange = (TextView) layout.findViewById(R.id.maxRange);
minDelay = (TextView) layout.findViewById(R.id.minDelay);
power = (TextView) layout.findViewById(R.id.power);
resolution = (TextView) layout.findViewById(R.id.resolution);
vendor = (TextView) layout.findViewById(R.id.vendor);
version = (TextView) layout.findViewById(R.id.version);
accuracy = (TextView) layout.findViewById(R.id.accuracy);
timestampLabel = (TextView) layout.findViewById(R.id.timestampLabel);
timestamp = (TextView) layout.findViewById(R.id.timestamp);
timestampUnits = (TextView) layout.findViewById(R.id.timestampUnits);
dataLabel = (TextView) layout.findViewById(R.id.dataLabel);
dataUnits = (TextView) layout.findViewById(R.id.dataUnits);
xAxis = (TextView) layout.findViewById(R.id.xAxis);
xAxisLabel = (TextView) layout.findViewById(R.id.xAxisLabel);
yAxis = (TextView) layout.findViewById(R.id.yAxis);
yAxisLabel = (TextView) layout.findViewById(R.id.yAxisLabel);
zAxis = (TextView) layout.findViewById(R.id.zAxis);
zAxisLabel = (TextView) layout.findViewById(R.id.zAxisLabel);
singleValue = (TextView) layout.findViewById(R.id.singleValue);
cosLabel = (TextView) layout.findViewById(R.id.cosLabel);
cos = (TextView) layout.findViewById(R.id.cos);
layout.findViewById(R.id.delayFastest).setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{
sensorManager.unregisterListener(SensorDisplayFragment.this);
sensorManager.registerListener(SensorDisplayFragment.this,
sensor,
SensorManager.SENSOR_DELAY_FASTEST);
}
});
layout.findViewById(R.id.delayGame).setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{
sensorManager.unregisterListener(SensorDisplayFragment.this);
sensorManager.registerListener(SensorDisplayFragment.this,
sensor,
SensorManager.SENSOR_DELAY_GAME);
}
});
layout.findViewById(R.id.delayNormal).setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{
sensorManager.unregisterListener(SensorDisplayFragment.this);
sensorManager.registerListener(SensorDisplayFragment.this,
sensor,
SensorManager.SENSOR_DELAY_NORMAL);
}
});
layout.findViewById(R.id.delayUi).setOnClickListener(new OnClickListener()
{
@Override
public void onClick(View v)
{
sensorManager.unregisterListener(SensorDisplayFragment.this);
sensorManager.registerListener(SensorDisplayFragment.this,
sensor,
SensorManager.SENSOR_DELAY_UI);
}
});
return layout;
}
public void displaySensor(Sensor sensor)
{
if (BuildConfig.DEBUG)
{
Log.d(TAG, "display the sensor");
}
this.sensor = sensor;
name.setText(sensor.getName());
type.setText(String.valueOf(sensor.getType()));
maxRange.setText(String.valueOf(sensor.getMaximumRange()));
minDelay.setText(String.valueOf(sensor.getMinDelay()));
power.setText(String.valueOf(sensor.getPower()));
resolution.setText(String.valueOf(sensor.getResolution()));
vendor.setText(String.valueOf(sensor.getVendor()));
version.setText(String.valueOf(sensor.getVersion()));
sensorManager.registerListener(this,
sensor,
SensorManager.SENSOR_DELAY_NORMAL);
}
/**
* @see android.hardware.SensorEventListener#onAccuracyChanged(android.hardware.Sensor, int)
*/
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy)
{
switch(accuracy)
{
case SensorManager.SENSOR_STATUS_ACCURACY_HIGH:
this.accuracy.setText("SENSOR_STATUS_ACCURACY_HIGH");
break;
case SensorManager.SENSOR_STATUS_ACCURACY_MEDIUM:
this.accuracy.setText("SENSOR_STATUS_ACCURACY_MEDIUM");
break;
case SensorManager.SENSOR_STATUS_ACCURACY_LOW:
this.accuracy.setText("SENSOR_STATUS_ACCURACY_LOW");
break;
case SensorManager.SENSOR_STATUS_UNRELIABLE:
this.accuracy.setText("SENSOR_STATUS_UNRELIABLE");
break;
}
}
/**
* @see android.hardware.SensorEventListener#onSensorChanged(android.hardware.SensorEvent)
*/
@Override
public void onSensorChanged(SensorEvent event)
{
onAccuracyChanged(event.sensor, event.accuracy);
timestampLabel.setVisibility(View.VISIBLE);
timestamp.setVisibility(View.VISIBLE);
timestamp.setText(String.valueOf(event.timestamp));
timestampUnits.setVisibility(View.VISIBLE);
switch (event.sensor.getType())
{
case Sensor.TYPE_ACCELEROMETER:
showEventData("Acceleration - gravity on axis",
ACCELERATION_UNITS,
event.values[0],
event.values[1],
event.values[2]);
break;
case Sensor.TYPE_MAGNETIC_FIELD:
showEventData("Abient Magnetic Field",
"uT",
event.values[0],
event.values[1],
event.values[2]);
break;
case Sensor.TYPE_GYROSCOPE:
showEventData("Angular speed around axis",
"radians/sec",
event.values[0],
event.values[1],
event.values[2]);
break;
case Sensor.TYPE_LIGHT:
showEventData("Ambient light",
"lux",
event.values[0]);
break;
case Sensor.TYPE_PRESSURE:
showEventData("Atmospheric pressure",
"hPa",
event.values[0]);
break;
case Sensor.TYPE_PROXIMITY:
showEventData("Distance",
"cm",
event.values[0]);
break;
case Sensor.TYPE_GRAVITY:
showEventData("Gravity",
ACCELERATION_UNITS,
event.values[0],
event.values[1],
event.values[2]);
break;
case Sensor.TYPE_LINEAR_ACCELERATION:
showEventData("Acceleration (not including gravity)",
ACCELERATION_UNITS,
event.values[0],
event.values[1],
event.values[2]);
break;
case Sensor.TYPE_ROTATION_VECTOR:
showEventData("Rotation Vector",
null,
event.values[0],
event.values[1],
event.values[2]);
xAxisLabel.setText("x*sin(" + THETA + "/2)");
yAxisLabel.setText("y*sin(" + THETA + "/2)");
zAxisLabel.setText("z*sin(" + THETA + "/2)");
if (event.values.length == 4)
{
cosLabel.setVisibility(View.VISIBLE);
cos.setVisibility(View.VISIBLE);
cos.setText(String.valueOf(event.values[3]));
}
break;
case Sensor.TYPE_ORIENTATION:
showEventData("Angle",
"Degrees",
event.values[0],
event.values[1],
event.values[2]);
xAxisLabel.setText(R.string.azimuthLabel);
yAxisLabel.setText(R.string.pitchLabel);
zAxisLabel.setText(R.string.rollLabel);
break;
case Sensor.TYPE_RELATIVE_HUMIDITY:
showEventData("Relatice ambient air humidity",
"%",
event.values[0]);
break;
case Sensor.TYPE_AMBIENT_TEMPERATURE:
showEventData("Ambien temperature",
"degree Celcius",
event.values[0]);
break;
}
}
private void showEventData(String label, String units, float x, float y, float z)
{
dataLabel.setVisibility(View.VISIBLE);
dataLabel.setText(label);
if (units == null)
{
dataUnits.setVisibility(View.GONE);
}
else
{
dataUnits.setVisibility(View.VISIBLE);
dataUnits.setText("(" + units + "):");
}
singleValue.setVisibility(View.GONE);
xAxisLabel.setVisibility(View.VISIBLE);
xAxisLabel.setText(R.string.xAxisLabel);
xAxis.setVisibility(View.VISIBLE);
xAxis.setText(String.valueOf(x));
yAxisLabel.setVisibility(View.VISIBLE);
yAxisLabel.setText(R.string.yAxisLabel);
yAxis.setVisibility(View.VISIBLE);
yAxis.setText(String.valueOf(y));
zAxisLabel.setVisibility(View.VISIBLE);
zAxisLabel.setText(R.string.zAxisLabel);
zAxis.setVisibility(View.VISIBLE);
zAxis.setText(String.valueOf(z));
}
private void showEventData(String label, String units, float value)
{
dataLabel.setVisibility(View.VISIBLE);
dataLabel.setText(label);
dataUnits.setVisibility(View.VISIBLE);
dataUnits.setText("(" + units + "):");
singleValue.setVisibility(View.VISIBLE);
singleValue.setText(String.valueOf(value));
xAxisLabel.setVisibility(View.GONE);
xAxis.setVisibility(View.GONE);
yAxisLabel.setVisibility(View.GONE);
yAxis.setVisibility(View.GONE);
zAxisLabel.setVisibility(View.GONE);
zAxis.setVisibility(View.GONE);
}
/**
* @see android.support.v4.app.Fragment#onHiddenChanged(boolean)
*/
@Override
public void onHiddenChanged(boolean hidden)
{
super.onHiddenChanged(hidden);
if (hidden)
{
if (BuildConfig.DEBUG)
{
Log.d(TAG, "Unregistering listener");
}
sensorManager.unregisterListener(this);
}
}
/**
* @see android.support.v4.app.Fragment#onPause()
*/
@Override
public void onPause()
{
super.onPause();
if (BuildConfig.DEBUG)
{
Log.d(TAG, "onPause");
Log.d(TAG, "Unregistering listener");
}
sensorManager.unregisterListener(this);
}
}
| 7,326 |
384 |
package org.holoeverywhere.app;
import android.content.Context;
import android.content.DialogInterface;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Message;
import android.util.TypedValue;
import android.view.KeyEvent;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListAdapter;
import org.holoeverywhere.R;
import org.holoeverywhere.widget.AlertController;
import org.holoeverywhere.widget.AlertController.AlertDecorViewInstaller;
import org.holoeverywhere.widget.AlertController.AlertParams;
import org.holoeverywhere.widget.AlertController.AlertParams.OnPrepareListViewListener;
import org.holoeverywhere.widget.Button;
import org.holoeverywhere.widget.ListView;
/**
 * HoloEverywhere-themed replacement for the framework
 * {@link android.app.AlertDialog}. All content handling is delegated to an
 * {@link AlertController}; the nested {@link Builder} mirrors the framework
 * builder API and additionally supports per-button dismiss behavior (see
 * {@link #DISMISS_ON_NEGATIVE} et al.) and custom dialog subclasses via
 * {@link Builder#setAlertDialogClass(Class)}.
 */
public class AlertDialog extends Dialog implements DialogInterface,
        AlertDecorViewInstaller {
    /**
     * Fluent builder for {@link AlertDialog} instances. Every setter stores
     * its value in an {@link AlertController.AlertParams}, which is applied
     * to the dialog in {@link #create()}.
     */
    public static class Builder {
        /** Constructor signature (Context, theme) used to reflectively
         * instantiate a custom dialog class. */
        private static final Class<?>[] CONSTRUCTOR_SIGNATURE = {
                Context.class, int.class
        };
        private Class<? extends AlertDialog> mDialogClass;
        private final AlertController.AlertParams mParams;

        public Builder(Context context) {
            this(context, 0);
        }

        public Builder(Context context, int theme) {
            mParams = new AlertParams(context, theme);
        }

        /** ORs the given dismiss-behavior flags into the current set. */
        public Builder addButtonBehavior(int buttonBehavior) {
            mParams.mButtonBehavior |= buttonBehavior;
            return this;
        }

        /**
         * Builds the dialog. If a custom dialog class was configured it is
         * instantiated reflectively; on any failure we fall back to the
         * plain {@link AlertDialog}.
         */
        public AlertDialog create() {
            AlertDialog dialog = null;
            if (mDialogClass != null) {
                try {
                    dialog = mDialogClass.getConstructor(CONSTRUCTOR_SIGNATURE).newInstance(
                            mParams.mContext, mParams.mTheme);
                } catch (Exception e) {
                    // Reflection failed (missing ctor, abstract class, ...);
                    // log and fall back to the stock AlertDialog below.
                    e.printStackTrace();
                }
            }
            if (dialog == null) {
                dialog = new AlertDialog(mParams.mContext, mParams.mTheme);
            }
            mParams.apply(dialog.mAlert);
            dialog.setCancelable(mParams.mCancelable);
            if (mParams.mCancelable) {
                dialog.setCanceledOnTouchOutside(true);
            }
            if (mParams.mOnCancelListener != null) {
                dialog.setOnCancelListener(mParams.mOnCancelListener);
            }
            if (mParams.mOnKeyListener != null) {
                dialog.setOnKeyListener(mParams.mOnKeyListener);
            }
            if (mParams.mOnDismissListener != null) {
                dialog.setOnDismissListener(mParams.mOnDismissListener);
            }
            return dialog;
        }

        public Context getContext() {
            return mParams.mContext;
        }

        /** Clears the given dismiss-behavior flags, leaving the rest intact. */
        public Builder removeButtonBehavior(int buttonBehavior) {
            // Clear exactly the bits in buttonBehavior. Replaces the original
            // "set then toggle" (|= followed by ^=) sequence, which computed
            // the same result less readably.
            mParams.mButtonBehavior &= ~buttonBehavior;
            return this;
        }

        public Builder setAdapter(final ListAdapter adapter,
                final OnClickListener listener) {
            mParams.mAdapter = adapter;
            mParams.mOnClickListener = listener;
            return this;
        }

        /** Configures a custom AlertDialog subclass to instantiate in create(). */
        public Builder setAlertDialogClass(Class<? extends AlertDialog> clazz) {
            mDialogClass = clazz;
            return this;
        }

        /** Convenience: blockDismiss=true keeps the dialog open on any button press. */
        public Builder setBlockDismiss(boolean blockDismiss) {
            return setButtonBehavior(blockDismiss ? 0 : DISMISS_ON_ALL);
        }

        /** Replaces the dismiss-behavior flag set wholesale. */
        public Builder setButtonBehavior(int buttonBehavior) {
            mParams.mButtonBehavior = buttonBehavior;
            return this;
        }

        public Builder setCancelable(boolean cancelable) {
            mParams.mCancelable = cancelable;
            return this;
        }

        public Builder setCheckedItem(int checkedItem) {
            mParams.mCheckedItem = checkedItem;
            return this;
        }

        public Builder setCursor(final Cursor cursor,
                final OnClickListener listener, String labelColumn) {
            mParams.mCursor = cursor;
            mParams.mLabelColumn = labelColumn;
            mParams.mOnClickListener = listener;
            return this;
        }

        public Builder setCustomTitle(View customTitleView) {
            mParams.mCustomTitleView = customTitleView;
            return this;
        }

        public Builder setIcon(Drawable icon) {
            mParams.mIcon = icon;
            return this;
        }

        public Builder setIcon(int iconId) {
            mParams.mIconId = iconId;
            return this;
        }

        /** Resolves a theme attribute (e.g. alertDialogIcon) to a drawable resource. */
        public Builder setIconAttribute(int attrId) {
            TypedValue out = new TypedValue();
            mParams.mContext.getTheme().resolveAttribute(attrId, out, true);
            mParams.mIconId = out.resourceId;
            return this;
        }

        public Builder setInverseBackgroundForced(boolean useInverseBackground) {
            mParams.mForceInverseBackground = useInverseBackground;
            return this;
        }

        public Builder setItems(CharSequence[] items,
                final OnClickListener listener) {
            mParams.mItems = items;
            mParams.mOnClickListener = listener;
            return this;
        }

        public Builder setItems(int itemsId, final OnClickListener listener) {
            mParams.mItems = mParams.mContext.getResources().getTextArray(itemsId);
            mParams.mOnClickListener = listener;
            return this;
        }

        public Builder setMessage(CharSequence message) {
            mParams.mMessage = message;
            return this;
        }

        public Builder setMessage(int messageId) {
            mParams.mMessage = mParams.mContext.getText(messageId);
            return this;
        }

        public Builder setMultiChoiceItems(CharSequence[] items,
                boolean[] checkedItems,
                final OnMultiChoiceClickListener listener) {
            mParams.mItems = items;
            mParams.mOnCheckboxClickListener = listener;
            mParams.mCheckedItems = checkedItems;
            mParams.mIsMultiChoice = true;
            return this;
        }

        public Builder setMultiChoiceItems(Cursor cursor,
                String isCheckedColumn, String labelColumn,
                final OnMultiChoiceClickListener listener) {
            mParams.mCursor = cursor;
            mParams.mOnCheckboxClickListener = listener;
            mParams.mIsCheckedColumn = isCheckedColumn;
            mParams.mLabelColumn = labelColumn;
            mParams.mIsMultiChoice = true;
            return this;
        }

        public Builder setMultiChoiceItems(int itemsId, boolean[] checkedItems,
                final OnMultiChoiceClickListener listener) {
            mParams.mItems = mParams.mContext.getResources().getTextArray(itemsId);
            mParams.mOnCheckboxClickListener = listener;
            mParams.mCheckedItems = checkedItems;
            mParams.mIsMultiChoice = true;
            return this;
        }

        public Builder setNegativeButton(CharSequence text,
                final OnClickListener listener) {
            mParams.mNegativeButtonText = text;
            mParams.mNegativeButtonListener = listener;
            return this;
        }

        public Builder setNegativeButton(int textId,
                final OnClickListener listener) {
            mParams.mNegativeButtonText = mParams.mContext.getText(textId);
            mParams.mNegativeButtonListener = listener;
            return this;
        }

        public Builder setNeutralButton(CharSequence text,
                final OnClickListener listener) {
            mParams.mNeutralButtonText = text;
            mParams.mNeutralButtonListener = listener;
            return this;
        }

        public Builder setNeutralButton(int textId,
                final OnClickListener listener) {
            mParams.mNeutralButtonText = mParams.mContext.getText(textId);
            mParams.mNeutralButtonListener = listener;
            return this;
        }

        public Builder setOnCancelListener(OnCancelListener onCancelListener) {
            mParams.mOnCancelListener = onCancelListener;
            return this;
        }

        public Builder setOnDismissListener(OnDismissListener onDismissListener) {
            mParams.mOnDismissListener = onDismissListener;
            return this;
        }

        public Builder setOnItemSelectedListener(
                final AdapterView.OnItemSelectedListener listener) {
            mParams.mOnItemSelectedListener = listener;
            return this;
        }

        public Builder setOnKeyListener(OnKeyListener onKeyListener) {
            mParams.mOnKeyListener = onKeyListener;
            return this;
        }

        public Builder setOnPrepareListViewListener(
                OnPrepareListViewListener listener) {
            mParams.mOnPrepareListViewListener = listener;
            return this;
        }

        public Builder setPositiveButton(CharSequence text,
                final OnClickListener listener) {
            mParams.mPositiveButtonText = text;
            mParams.mPositiveButtonListener = listener;
            return this;
        }

        public Builder setPositiveButton(int textId,
                final OnClickListener listener) {
            mParams.mPositiveButtonText = mParams.mContext.getText(textId);
            mParams.mPositiveButtonListener = listener;
            return this;
        }

        public Builder setSingleChoiceItems(CharSequence[] items,
                int checkedItem, final OnClickListener listener) {
            mParams.mItems = items;
            mParams.mOnClickListener = listener;
            mParams.mCheckedItem = checkedItem;
            mParams.mIsSingleChoice = true;
            return this;
        }

        public Builder setSingleChoiceItems(Cursor cursor, int checkedItem,
                String labelColumn, final OnClickListener listener) {
            mParams.mCursor = cursor;
            mParams.mOnClickListener = listener;
            mParams.mCheckedItem = checkedItem;
            mParams.mLabelColumn = labelColumn;
            mParams.mIsSingleChoice = true;
            return this;
        }

        public Builder setSingleChoiceItems(int itemsId, int checkedItem,
                final OnClickListener listener) {
            mParams.mItems = mParams.mContext.getResources().getTextArray(itemsId);
            mParams.mOnClickListener = listener;
            mParams.mCheckedItem = checkedItem;
            mParams.mIsSingleChoice = true;
            return this;
        }

        public Builder setSingleChoiceItems(ListAdapter adapter,
                int checkedItem, final OnClickListener listener) {
            mParams.mAdapter = adapter;
            mParams.mOnClickListener = listener;
            mParams.mCheckedItem = checkedItem;
            mParams.mIsSingleChoice = true;
            return this;
        }

        public Builder setTheme(int theme) {
            mParams.mTheme = theme;
            return this;
        }

        public Builder setTitle(CharSequence title) {
            mParams.mTitle = title;
            return this;
        }

        public Builder setTitle(int titleId) {
            mParams.mTitle = mParams.mContext.getText(titleId);
            return this;
        }

        public Builder setView(View view) {
            mParams.mView = view;
            mParams.mViewSpacingSpecified = false;
            return this;
        }

        public Builder setView(View view, int viewSpacingLeft,
                int viewSpacingTop, int viewSpacingRight, int viewSpacingBottom) {
            mParams.mView = view;
            mParams.mViewSpacingSpecified = true;
            mParams.mViewSpacingLeft = viewSpacingLeft;
            mParams.mViewSpacingTop = viewSpacingTop;
            mParams.mViewSpacingRight = viewSpacingRight;
            mParams.mViewSpacingBottom = viewSpacingBottom;
            return this;
        }

        /** Builds and immediately shows the dialog. */
        public AlertDialog show() {
            AlertDialog dialog = create();
            dialog.show();
            return dialog;
        }
    }

    // Bit flags controlling which buttons dismiss the dialog when pressed.
    public static final int DISMISS_ON_ALL = 7; // DO_NEG | DO_NEU | DO_POS
    public static final int DISMISS_ON_NEGATIVE = 1 << 0; // -BUTTON_NEGATIVE;
    public static final int DISMISS_ON_NEUTRAL = 1 << 1; // -BUTTON_NEUTRAL;
    public static final int DISMISS_ON_POSITIVE = 1 << 2; // -BUTTON_POSITIVE;

    public static final int THEME_HOLO_DARK = 1;
    public static final int THEME_HOLO_LIGHT = 2;

    /**
     * Maps the THEME_* aliases to concrete style resources; a raw resource id
     * (>= 0x01000000) passes through, and anything else falls back to the
     * theme's alertDialogTheme attribute.
     */
    static int resolveDialogTheme(Context context, int resid) {
        if (resid == AlertDialog.THEME_HOLO_DARK) {
            return R.style.Holo_Theme_Dialog_Alert;
        } else if (resid == AlertDialog.THEME_HOLO_LIGHT) {
            return R.style.Holo_Theme_Dialog_Alert_Light;
        } else if (resid >= 0x01000000) {
            return resid;
        } else {
            TypedValue outValue = new TypedValue();
            context.getTheme().resolveAttribute(R.attr.alertDialogTheme, outValue, true);
            return outValue.resourceId;
        }
    }

    // Owns title/message/buttons/list content of this dialog.
    private final AlertController mAlert;

    protected AlertDialog(Context context) {
        this(context, true, null, 0);
    }

    protected AlertDialog(Context context, boolean cancelable,
            OnCancelListener cancelListener) {
        this(context, cancelable, cancelListener, 0);
    }

    protected AlertDialog(Context context, boolean cancelable,
            OnCancelListener cancelListener, int theme) {
        super(context, AlertDialog.resolveDialogTheme(context, theme));
        setCancelable(cancelable);
        setOnCancelListener(cancelListener);
        mAlert = new AlertController(getContext(), this, getWindow(), this);
    }

    protected AlertDialog(Context context, int theme) {
        this(context, true, null, theme);
    }

    public Button getButton(int whichButton) {
        return mAlert.getButton(whichButton);
    }

    public ListView getListView() {
        return mAlert.getListView();
    }

    /** AlertDecorViewInstaller callback: installs the controller's layout. */
    @Override
    public void installDecorView(Context context, int layout) {
        setContentView(layout);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mAlert.installContent();
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (mAlert.onKeyDown(keyCode, event)) {
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        if (mAlert.onKeyUp(keyCode, event)) {
            return true;
        }
        return super.onKeyUp(keyCode, event);
    }

    @Deprecated
    public void setButton(CharSequence text, Message msg) {
        setButton(DialogInterface.BUTTON_POSITIVE, text, msg);
    }

    @Deprecated
    public void setButton(CharSequence text, final OnClickListener listener) {
        setButton(DialogInterface.BUTTON_POSITIVE, text, listener);
    }

    public void setButton(int whichButton, CharSequence text, Message msg) {
        mAlert.setButton(whichButton, text, null, msg);
    }

    public void setButton(int whichButton, CharSequence text,
            OnClickListener listener) {
        mAlert.setButton(whichButton, text, listener, null);
    }

    @Deprecated
    public void setButton2(CharSequence text, Message msg) {
        setButton(DialogInterface.BUTTON_NEGATIVE, text, msg);
    }

    @Deprecated
    public void setButton2(CharSequence text, final OnClickListener listener) {
        setButton(DialogInterface.BUTTON_NEGATIVE, text, listener);
    }

    @Deprecated
    public void setButton3(CharSequence text, Message msg) {
        setButton(DialogInterface.BUTTON_NEUTRAL, text, msg);
    }

    @Deprecated
    public void setButton3(CharSequence text, final OnClickListener listener) {
        setButton(DialogInterface.BUTTON_NEUTRAL, text, listener);
    }

    public void setButtonBehavior(int buttonBehavior) {
        mAlert.setButtonBehavior(buttonBehavior);
    }

    public void setCustomTitle(View customTitleView) {
        mAlert.setCustomTitle(customTitleView);
    }

    public void setIcon(Drawable icon) {
        mAlert.setIcon(icon);
    }

    public void setIcon(int resId) {
        mAlert.setIcon(resId);
    }

    /** Resolves a theme attribute to an icon resource for the dialog. */
    public void setIconAttribute(int attrId) {
        TypedValue out = new TypedValue();
        getContext().getTheme().resolveAttribute(attrId, out, true);
        mAlert.setIcon(out.resourceId);
    }

    public void setInverseBackgroundForced(boolean forceInverseBackground) {
        mAlert.setInverseBackgroundForced(forceInverseBackground);
    }

    public void setMessage(CharSequence message) {
        mAlert.setMessage(message);
    }

    @Override
    public void setTitle(CharSequence title) {
        super.setTitle(title);
        mAlert.setTitle(title);
    }

    public void setView(View view) {
        mAlert.setView(view);
    }

    public void setView(View view, int viewSpacingLeft, int viewSpacingTop,
            int viewSpacingRight, int viewSpacingBottom) {
        mAlert.setView(view, viewSpacingLeft, viewSpacingTop, viewSpacingRight,
                viewSpacingBottom);
    }
}
| 8,238 |
913 | /**
* CANopen Global fail-safe command protocol.
*
* @file CO_GFC.c
* @ingroup CO_GFC
* @author <NAME>
* @copyright 2020 - 2020 <NAME>
*
* This file is part of CANopenNode, an opensource CANopen Stack.
* Project home page is <https://github.com/CANopenNode/CANopenNode>.
* For more information on CANopen see <http://www.can-cia.org/>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "304/CO_GFC.h"
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_ENABLE
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_CONSUMER
static void CO_GFC_receive(void *object, void *msg)
{
CO_GFC_t *GFC;
uint8_t DLC = CO_CANrxMsg_readDLC(msg);
GFC = (CO_GFC_t *)
object; /* this is the correct pointer type of the first argument */
if ((*GFC->valid == 0x01) && (DLC == 0)) {
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_CONSUMER
/* Optional signal to RTOS, which can resume task, which handles SRDO.
*/
if (GFC->pFunctSignalSafe != NULL) {
GFC->pFunctSignalSafe(GFC->functSignalObjectSafe);
}
#endif
}
}
/*
 * Register the optional callback invoked when a valid GFC frame is
 * received, i.e. when the device should enter its safe state.
 *
 * 'object' is handed back unchanged as the callback's argument. Passing a
 * NULL callback clears the registration. A NULL GFC object is ignored.
 */
void CO_GFC_initCallbackEnterSafeState(CO_GFC_t *GFC,
                                       void *object,
                                       void (*pFunctSignalSafe)(void *object))
{
    if (GFC == NULL) {
        return;
    }

    GFC->functSignalObjectSafe = object;
    GFC->pFunctSignalSafe = pFunctSignalSafe;
}
#endif
/*
 * Initialize the GFC object.
 *
 * Configures the transmit buffer (producer build) and/or the receive buffer
 * (consumer build) for the GFC COB. 'valid' points to the OD entry that
 * enables the protocol (0x01 = enabled); it is read at run time by both
 * CO_GFC_receive() and CO_GFCsend().
 *
 * Returns CO_ERROR_ILLEGAL_ARGUMENT on NULL arguments,
 * CO_ERROR_TX_UNCONFIGURED if the tx buffer could not be set up, the error
 * from CO_CANrxBufferInit() if rx setup fails, otherwise CO_ERROR_NO.
 */
CO_ReturnError_t CO_GFC_init(CO_GFC_t *GFC,
                             uint8_t *valid,
                             CO_CANmodule_t *GFC_CANdevRx,
                             uint16_t GFC_rxIdx,
                             uint16_t CANidRxGFC,
                             CO_CANmodule_t *GFC_CANdevTx,
                             uint16_t GFC_txIdx,
                             uint16_t CANidTxGFC)
{
    /* verify arguments */
    if (GFC == NULL || valid == NULL || GFC_CANdevRx == NULL ||
        GFC_CANdevTx == NULL) {
        return CO_ERROR_ILLEGAL_ARGUMENT;
    }

    GFC->valid = valid;
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_PRODUCER
    GFC->CANdevTx = GFC_CANdevTx;
#endif
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_CONSUMER
    /* no callback registered until CO_GFC_initCallbackEnterSafeState() */
    GFC->functSignalObjectSafe = NULL;
    GFC->pFunctSignalSafe = NULL;
#endif

#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_PRODUCER
    GFC->CANtxBuff = CO_CANtxBufferInit(
        GFC->CANdevTx,   /* CAN device */
        GFC_txIdx,       /* index of specific buffer inside CAN module */
        CANidTxGFC,      /* CAN identifier */
        0,               /* rtr */
        0,               /* number of data bytes */
        0);              /* synchronous message flag bit */

    if (GFC->CANtxBuff == NULL) {
        return CO_ERROR_TX_UNCONFIGURED;
    }
#else
    (void)GFC_txIdx;  /* unused */
    (void)CANidTxGFC; /* unused */
#endif
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_CONSUMER
    /* mask 0x7FF: match the 11-bit CAN identifier exactly */
    const CO_ReturnError_t r = CO_CANrxBufferInit(
        GFC_CANdevRx,    /* CAN device */
        GFC_rxIdx,       /* rx buffer index */
        CANidRxGFC,      /* CAN identifier */
        0x7FF,           /* mask */
        0,               /* rtr */
        (void *)GFC,     /* object passed to receive function */
        CO_GFC_receive); /* this function will process received message */
    if (r != CO_ERROR_NO) {
        return r;
    }
#else
    (void)GFC_rxIdx;  /* unused */
    (void)CANidRxGFC; /* unused */
#endif

    return CO_ERROR_NO;
}
#if (CO_CONFIG_GFC) & CO_CONFIG_GFC_PRODUCER
/*
 * Transmit a GFC message (zero-length frame on the configured COB-ID).
 *
 * Sends only when the GFC producer is enabled (OD "valid" flag == 0x01);
 * otherwise this is a no-op returning CO_ERROR_NO. Braces added to the
 * conditional per house style; behavior is unchanged.
 */
CO_ReturnError_t CO_GFCsend(CO_GFC_t *GFC)
{
    if (*GFC->valid == 0x01) {
        return CO_CANsend(GFC->CANdevTx, GFC->CANtxBuff);
    }
    return CO_ERROR_NO;
}
#endif
#endif /* (CO_CONFIG_GFC) & CO_CONFIG_GFC_ENABLE */
| 2,006 |
334 | // Auto generated code, do not modify
package nxt.http.callers;
/**
 * Fluent builder for the NXT "dgsListing" HTTP API call (Digital Goods Store
 * product listing). Each setter stores one request parameter and returns
 * {@code this} for chaining; common transaction parameters come from
 * {@link CreateTransactionCallBuilder}.
 *
 * NOTE: this class is auto-generated (see file header) - regenerate rather
 * than hand-editing the call surface.
 */
public class DgsListingCall extends CreateTransactionCallBuilder<DgsListingCall> {
    private DgsListingCall() {
        super(ApiSpec.dgsListing);
    }

    /** Creates a fresh, empty call builder. */
    public static DgsListingCall create() {
        return new DgsListingCall();
    }

    // Price per unit, in NQT.
    public DgsListingCall priceNQT(long priceNQT) {
        return param("priceNQT", priceNQT);
    }

    public DgsListingCall quantity(String quantity) {
        return param("quantity", quantity);
    }

    public DgsListingCall name(String name) {
        return param("name", name);
    }

    public DgsListingCall description(String description) {
        return param("description", description);
    }

    public DgsListingCall tags(String tags) {
        return param("tags", tags);
    }

    // Attaches raw bytes as a multipart file part rather than a form field.
    public DgsListingCall messageFile(byte[] b) {
        return parts("messageFile", b);
    }
}
| 346 |
4,036 | <reponame>madhurimamandal/codeql
// Generated automatically from android.os.Handler for testing purposes
package android.os;
import android.os.Looper;
import android.os.Message;
import android.util.Printer;
/**
 * Test-only stub of {@code android.os.Handler}, auto-generated for CodeQL
 * test extraction (see file header). Only the API surface matters: all
 * bodies are no-ops that return default values (null/false), so none of the
 * real message-queue behavior is present.
 */
public class Handler
{
    public Handler(){}
    public Handler(Handler.Callback p0){}
    public Handler(Looper p0){}
    public Handler(Looper p0, Handler.Callback p1){}
    public String getMessageName(Message p0){ return null; }
    public String toString(){ return null; }
    public boolean sendMessageAtTime(Message p0, long p1){ return false; }
    public final Looper getLooper(){ return null; }
    public final Message obtainMessage(){ return null; }
    public final Message obtainMessage(int p0){ return null; }
    public final Message obtainMessage(int p0, Object p1){ return null; }
    public final Message obtainMessage(int p0, int p1, int p2){ return null; }
    public final Message obtainMessage(int p0, int p1, int p2, Object p3){ return null; }
    public final boolean hasCallbacks(Runnable p0){ return false; }
    public final boolean hasMessages(int p0){ return false; }
    public final boolean hasMessages(int p0, Object p1){ return false; }
    public final boolean post(Runnable p0){ return false; }
    public final boolean postAtFrontOfQueue(Runnable p0){ return false; }
    public final boolean postAtTime(Runnable p0, Object p1, long p2){ return false; }
    public final boolean postAtTime(Runnable p0, long p1){ return false; }
    public final boolean postDelayed(Runnable p0, Object p1, long p2){ return false; }
    public final boolean postDelayed(Runnable p0, long p1){ return false; }
    public final boolean sendEmptyMessage(int p0){ return false; }
    public final boolean sendEmptyMessageAtTime(int p0, long p1){ return false; }
    public final boolean sendEmptyMessageDelayed(int p0, long p1){ return false; }
    public final boolean sendMessage(Message p0){ return false; }
    public final boolean sendMessageAtFrontOfQueue(Message p0){ return false; }
    public final boolean sendMessageDelayed(Message p0, long p1){ return false; }
    public final void dump(Printer p0, String p1){}
    public final void removeCallbacks(Runnable p0){}
    public final void removeCallbacks(Runnable p0, Object p1){}
    public final void removeCallbacksAndMessages(Object p0){}
    public final void removeMessages(int p0){}
    public final void removeMessages(int p0, Object p1){}
    public static Handler createAsync(Looper p0){ return null; }
    public static Handler createAsync(Looper p0, Handler.Callback p1){ return null; }
    public void dispatchMessage(Message p0){}
    public void handleMessage(Message p0){}
    // Mirrors android.os.Handler.Callback; stubbed interface, no default impl.
    static public interface Callback
    {
        boolean handleMessage(Message p0);
    }
}
| 872 |
2,151 | <filename>chrome/browser/page_load_metrics/page_load_metrics_util.cc
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/page_load_metrics/page_load_metrics_util.h"
#include <algorithm>
#include "chrome/common/page_load_metrics/page_load_timing.h"
namespace page_load_metrics {
namespace {
// True when a page that started in the foreground was backgrounded at or
// before its end-of-life time (or has no end time recorded yet).
bool IsBackgroundAbort(const PageLoadExtraInfo& info) {
  const bool was_backgrounded =
      info.started_in_foreground && info.first_background_time;
  if (!was_backgrounded)
    return false;
  // With no recorded end time, the background event itself is the abort.
  if (!info.page_end_time)
    return true;
  return info.first_background_time <= info.page_end_time;
}
// Maps a PageEndReason onto the PageAbortReason recorded in abort metrics.
// End states that do not represent an abort map to ABORT_NONE.
PageAbortReason GetAbortReasonForEndReason(PageEndReason end_reason) {
  if (end_reason == END_RELOAD)
    return ABORT_RELOAD;
  if (end_reason == END_FORWARD_BACK)
    return ABORT_FORWARD_BACK;
  if (end_reason == END_NEW_NAVIGATION)
    return ABORT_NEW_NAVIGATION;
  if (end_reason == END_STOP)
    return ABORT_STOP;
  if (end_reason == END_CLOSE)
    return ABORT_CLOSE;
  if (end_reason == END_OTHER)
    return ABORT_OTHER;
  return ABORT_NONE;
}
// Common helper for QueryContainsComponent and QueryContainsComponentPrefix.
// Returns true if |component| occurs in |query| as a complete query-string
// component (bounded by '&' or the string ends); when |component_is_prefix|
// is true only the start of the component has to line up.
bool QueryContainsComponentHelper(const base::StringPiece query,
                                  const base::StringPiece component,
                                  bool component_is_prefix) {
  if (query.empty() || component.empty() ||
      component.length() > query.length()) {
    return false;
  }

  // Ensures that the first character of |query| is not a query or fragment
  // delimiter character (? or #). Including it can break the later test for
  // |component| being at the start of the query string.
  // Note: This heuristic can cause a component string that starts with one of
  // these characters to not match a query string which contains it at the
  // beginning.
  const base::StringPiece trimmed_query =
      base::TrimString(query, "?#", base::TrimPositions::TRIM_LEADING);

  // We shouldn't try to find matches beyond the point where there aren't enough
  // characters left in query to fully match the component.
  const size_t last_search_start = trimmed_query.length() - component.length();

  // We need to search for matches in a loop, rather than stopping at the first
  // match, because we may initially match a substring that isn't a full query
  // string component. Consider, for instance, the query string 'ab=cd&b=c'. If
  // we search for component 'b=c', the first substring match will be characters
  // 1-3 (zero-based) in the query string. However, this isn't a full component
  // (the full component is ab=cd) so the match will fail. Thus, we must
  // continue our search to find the second substring match, which in the
  // example is at characters 6-8 (the end of the query string) and is a
  // successful component match.
  // NOTE(review): after a rejected candidate the next search resumes
  // component.length() characters later, which assumes candidate matches
  // cannot overlap; this appears to hold because a component cannot usefully
  // contain '&' - confirm if components with '&' ever occur.
  for (size_t start_offset = 0; start_offset <= last_search_start;
       start_offset += component.length()) {
    start_offset = trimmed_query.find(component, start_offset);
    if (start_offset == std::string::npos) {
      // We searched to end of string and did not find a match.
      return false;
    }
    // Verify that the character prior to the component is valid (either we're
    // at the beginning of the query string, or are preceded by an ampersand).
    if (start_offset != 0 && trimmed_query[start_offset - 1] != '&') {
      continue;
    }
    if (!component_is_prefix) {
      // Verify that the character after the component substring is valid
      // (either we're at the end of the query string, or are followed by an
      // ampersand).
      const size_t after_offset = start_offset + component.length();
      if (after_offset < trimmed_query.length() &&
          trimmed_query[after_offset] != '&') {
        continue;
      }
    }
    return true;
  }
  return false;
}
} // namespace
// Returns true if the page started in the foreground, |event| has a value,
// and that value falls before the page was first backgrounded (if ever).
bool WasStartedInForegroundOptionalEventInForeground(
    const base::Optional<base::TimeDelta>& event,
    const PageLoadExtraInfo& info) {
  if (!info.started_in_foreground || !event)
    return false;
  // Never backgrounded: any recorded event happened in the foreground.
  if (!info.first_background_time)
    return true;
  return event.value() <= info.first_background_time.value();
}
// Returns true if the page started in the background but |event| occurred
// within the first foreground interval: at or after the first foreground
// time, and at or before the first background time (if any).
bool WasStartedInBackgroundOptionalEventInForeground(
    const base::Optional<base::TimeDelta>& event,
    const PageLoadExtraInfo& info) {
  if (info.started_in_foreground || !event || !info.first_foreground_time)
    return false;
  if (event.value() < info.first_foreground_time.value())
    return false;
  return !info.first_background_time ||
         event.value() <= info.first_background_time.value();
}
// Derives the abort reason, initiating party, and abort timestamp for this
// page load. Returns a default-constructed PageAbortInfo when the load was
// not aborted.
PageAbortInfo GetPageAbortInfo(const PageLoadExtraInfo& info) {
  if (IsBackgroundAbort(info)) {
    // Backgrounding is usually user driven, but we cannot prove it: on
    // Android, for example, an inactivity screen timeout backgrounds the tab
    // without any user action. Report it as not user initiated.
    return {ABORT_BACKGROUND, UserInitiatedInfo::NotUserInitiated(),
            info.first_background_time.value()};
  }
  const PageAbortReason reason =
      GetAbortReasonForEndReason(info.page_end_reason);
  if (reason == ABORT_NONE)
    return PageAbortInfo();
  return {reason, info.page_end_user_initiated_info,
          info.page_end_time.value()};
}
// Duration from navigation start until the page was backgrounded, ended, or
// the whole Chrome app was backgrounded -- whichever came first. Empty when
// the load did not start in the foreground or no end point is known yet.
base::Optional<base::TimeDelta> GetInitialForegroundDuration(
    const PageLoadExtraInfo& info,
    base::TimeTicks app_background_time) {
  if (!info.started_in_foreground)
    return base::Optional<base::TimeDelta>();
  base::Optional<base::TimeDelta> duration =
      OptionalMin(info.first_background_time, info.page_end_time);
  if (duration)
    return duration;
  // Neither a backgrounding nor a page end was observed, but the Chrome app
  // itself may have gone to the background. On platforms where Chrome can be
  // killed once backgrounded (Android), treat that moment as the end of the
  // initial foreground period.
  if (app_background_time.is_null())
    return duration;
  return base::Optional<base::TimeDelta>(app_background_time -
                                         info.navigation_start);
}
// True if |behavior| was reported by either the main frame or any subframe.
bool DidObserveLoadingBehaviorInAnyFrame(
    const page_load_metrics::PageLoadExtraInfo& info,
    blink::WebLoadingBehaviorFlag behavior) {
  const int combined_flags = info.main_frame_metadata.behavior_flags |
                             info.subframe_metadata.behavior_flags;
  return (combined_flags & behavior) != 0;
}
bool IsGoogleSearchHostname(const GURL& url) {
base::Optional<std::string> result =
page_load_metrics::GetGoogleHostnamePrefix(url);
return result && result.value() == "www";
}
// True when |url| looks like a Google search results page.
bool IsGoogleSearchResultUrl(const GURL& url) {
  if (!IsGoogleSearchHostname(url))
    return false;
  // NOTE: a 'q=' component may live in either the query or the fragment --
  // AJAXy search can store the query in the URL fragment -- so check both.
  const bool has_search_query =
      QueryContainsComponentPrefix(url.query_piece(), "q=") ||
      QueryContainsComponentPrefix(url.ref_piece(), "q=");
  if (!has_search_query)
    return false;
  const base::StringPiece path = url.path_piece();
  return path == "/search" || path == "/webhp" || path == "/custom" ||
         path == "/";
}
// True when |url| is one of the two Google search redirector endpoints.
bool IsGoogleSearchRedirectorUrl(const GURL& url) {
  if (!IsGoogleSearchHostname(url))
    return false;
  // Primary redirector: /url. Search-result redirects are distinguished from
  // other general google redirects by a 'source=web' component in the query
  // string.
  const bool is_primary_redirector =
      url.path_piece() == "/url" && url.has_query() &&
      QueryContainsComponent(url.query_piece(), "source=web");
  if (is_primary_redirector)
    return true;
  // Secondary redirector, used for intent-based navigations from search. It
  // receives its redirect target in the fragment/hash/ref portion of the URL
  // (after '#'). No param check is needed in the ref, since this endpoint is
  // only used for redirects from search.
  return url.path_piece() == "/searchurl/r.html" && url.has_ref();
}
// Returns true when |query| contains |component| as a complete,
// '&'-delimited query-string component (exact match).
bool QueryContainsComponent(const base::StringPiece query,
                            const base::StringPiece component) {
  return QueryContainsComponentHelper(query, component, false);
}
// Returns true when |query| contains an '&'-delimited component that starts
// with |component| (prefix match), e.g. component "q=" matches "q=foo".
bool QueryContainsComponentPrefix(const base::StringPiece query,
                                  const base::StringPiece component) {
  return QueryContainsComponentHelper(query, component, true);
}
} // namespace page_load_metrics
| 2,858 |
843 | <filename>api-metastore/src/test/java/org/zalando/nakadi/service/PaginationServiceTest.java
package org.zalando.nakadi.service;
import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Test;
import org.zalando.nakadi.domain.PaginationWrapper;
import java.util.Collections;
public class PaginationServiceTest {
@Test
public void testPaginationPrev() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(2, 5, "/schemas", (o, l) -> Collections.emptyList(), () -> 1);
Assert.assertFalse(paginationWrapper.getLinks().getNext().isPresent());
Assert.assertFalse(paginationWrapper.getLinks().getPrev().isPresent());
}
@Test
public void testPaginationPrev2() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(2, 5, "/schemas", (o, l) -> Collections.emptyList(), () -> 20);
Assert.assertFalse(paginationWrapper.getLinks().getNext().isPresent());
Assert.assertEquals("/schemas?offset=0&limit=5", paginationWrapper.getLinks().getPrev().get().getHref());
}
@Test
public void testPaginationPrev3() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(2, 5, "/schemas", (o, l) -> Collections.emptyList(), () -> 2);
Assert.assertFalse(paginationWrapper.getLinks().getNext().isPresent());
Assert.assertFalse(paginationWrapper.getLinks().getPrev().isPresent());
}
@Test
public void testPaginationPrev4() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(2, 5, "/schemas", (o, l) -> Collections.emptyList(), () -> 5);
Assert.assertFalse(paginationWrapper.getLinks().getNext().isPresent());
Assert.assertFalse(paginationWrapper.getLinks().getPrev().isPresent());
}
@Test
public void testPaginationEmpty() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(0, 5, "/schemas", (o, l) -> Collections.emptyList(), () -> 5);
Assert.assertFalse(paginationWrapper.getLinks().getNext().isPresent());
Assert.assertFalse(paginationWrapper.getLinks().getPrev().isPresent());
}
@Test
public void testPaginationNext() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(0, 3, "/schemas",
(o, l) -> Lists.newArrayList("One", "Two", "Three", "Four"),
() -> 1);
Assert.assertEquals("/schemas?offset=3&limit=3", paginationWrapper.getLinks().getNext().get().getHref());
Assert.assertFalse(paginationWrapper.getLinks().getPrev().isPresent());
}
@Test
public void testPaginationPrevAndNext() {
final PaginationService paginationService = new PaginationService();
final PaginationWrapper paginationWrapper =
paginationService.paginate(2, 3, "/schemas",
(o, l) -> Lists.newArrayList("One", "Two", "Three", "Four"),
() -> 1);
Assert.assertEquals("/schemas?offset=5&limit=3", paginationWrapper.getLinks().getNext().get().getHref());
Assert.assertEquals("/schemas?offset=0&limit=3", paginationWrapper.getLinks().getPrev().get().getHref());
}
} | 1,494 |
348 | {"nom":"Lachapelle","circ":"4ème circonscription","dpt":"Somme","inscrits":66,"abs":26,"votants":40,"blancs":6,"nuls":1,"exp":33,"res":[{"nuance":"FN","nom":"<NAME>","voix":26},{"nuance":"REM","nom":"<NAME>","voix":7}]} | 90 |
5,169 | {
"name": "WBZImportInvoice",
"version": "0.0.16",
"summary": "微报账项目集合微信和支付宝的卡包导入发票",
"description": "微报账项目集合微信和支付宝的卡包导入发票,供别人方便使用",
"homepage": "https://github.com/Dreamle/WBZImportInvoice",
"license": "MIT",
"authors": {
"dreamLee": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "https://github.com/Dreamle/WBZImportInvoice.git",
"tag": "0.0.16"
},
"vendored_frameworks": "**/WBZLib.framework",
"source_files": [
"WBZImportInvoice",
"*.*"
],
"dependencies": {
"WechatOpenSDK": [
"~> 1.8.4"
]
},
"requires_arc": true
}
| 368 |
1,830 | <reponame>thirteen13Floor/zeebe
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Licensed under the Zeebe Community License 1.1. You may not use this file
* except in compliance with the Zeebe Community License 1.1.
*/
package io.camunda.zeebe.protocol.jackson.record;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver;
import io.camunda.zeebe.protocol.jackson.record.RecordBuilder.ImmutableRecord;
import io.camunda.zeebe.protocol.record.Record;
import io.camunda.zeebe.protocol.record.RecordType;
import io.camunda.zeebe.protocol.record.RecordValue;
import io.camunda.zeebe.protocol.record.RejectionType;
import io.camunda.zeebe.protocol.record.intent.Intent;
import org.immutables.value.Value;
/**
 * Abstract base for an immutable, Jackson-deserializable {@link Record}. The Immutables
 * annotation processor generates the concrete {@code ImmutableRecord} implementation, which is
 * also the type Jackson deserializes into (see {@code @JsonDeserialize} below).
 *
 * @param <T> the concrete {@link RecordValue} type carried by this record
 */
@Value.Immutable
@ZeebeStyle
@JsonDeserialize(as = ImmutableRecord.class)
public abstract class AbstractRecord<T extends RecordValue>
    implements Record<T>, DefaultJsonSerializable {
  /**
   * The intent is resolved polymorphically from the external sibling {@code valueType} JSON
   * property via {@link IntentTypeIdResolver}, since each value type defines its own intent enum.
   *
   * @return the record's intent; {@link Intent#UNKNOWN} when not set
   */
  @Value.Default
  @JsonTypeInfo(use = Id.CUSTOM, include = As.EXTERNAL_PROPERTY, property = "valueType")
  @JsonTypeIdResolver(IntentTypeIdResolver.class)
  @Override
  public Intent getIntent() {
    return Intent.UNKNOWN;
  }
  /** @return the record type; defaults to {@link RecordType#NULL_VAL} when not set */
  @Value.Default
  @Override
  public RecordType getRecordType() {
    return RecordType.NULL_VAL;
  }
  /** @return the rejection type; defaults to {@link RejectionType#NULL_VAL} when not set */
  @Value.Default
  @Override
  public RejectionType getRejectionType() {
    return RejectionType.NULL_VAL;
  }
  /**
   * The concrete value class is likewise resolved from the external {@code valueType} JSON
   * property, here via {@link ValueTypeIdResolver}.
   *
   * @return the record's payload value
   */
  @JsonTypeInfo(use = Id.CUSTOM, include = As.EXTERNAL_PROPERTY, property = "valueType")
  @JsonTypeIdResolver(ValueTypeIdResolver.class)
  @Override
  public abstract T getValue();
  /** @return itself as the object is immutable and can be used as is */
  @SuppressWarnings({"MethodDoesntCallSuperMethod", "squid:S2975", "squid:S1182"})
  @Override
  public Record<T> clone() {
    return this;
  }
}
| 715 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_BROWSER_DEVTOOLS_PROTOCOL_DEVTOOLS_NETWORK_RESOURCE_LOADER_H_
#define CONTENT_BROWSER_DEVTOOLS_PROTOCOL_DEVTOOLS_NETWORK_RESOURCE_LOADER_H_
#include <memory>
#include "content/common/content_export.h"
#include "mojo/public/cpp/bindings/remote.h"
#include "services/network/public/cpp/resource_request.h"
#include "services/network/public/cpp/simple_url_loader.h"
#include "services/network/public/cpp/simple_url_loader_stream_consumer.h"
#include "services/network/public/mojom/url_loader.mojom.h"
#include "services/network/public/mojom/url_loader_factory.mojom.h"
namespace content {
namespace protocol {
// The DevToolsNetworkResourceLoader loads a network resource for DevTools
// and passes it to the provided call-back once loading completed. Currently,
// the resource is provided as a string, but in the future this will use
// a DevToolsStreamPipe. This is why we don't just use DownloadToString.
class CONTENT_EXPORT DevToolsNetworkResourceLoader
    : public network::SimpleURLLoaderStreamConsumer {
 public:
  // Invoked exactly once when loading finishes. |rh| carries the response
  // headers (may be null), |success| whether the body was fully received,
  // |net_error| the network error code, and |content| the response body
  // accumulated as a string.
  using CompletionCallback =
      base::OnceCallback<void(DevToolsNetworkResourceLoader*,
                              const net::HttpResponseHeaders* rh,
                              bool success,
                              int net_error,
                              std::string content)>;
  // Whether the request should bypass the HTTP cache or use default caching.
  enum class Caching { kBypass, kDefault };
  // Credential mode for the request (include vs. same-site).
  enum class Credentials { kInclude, kSameSite };
  // The |origin| and |site_for_cookies| parameters are supplied by the caller,
  // and we trust the caller that these values are reasonable. They are usually
  // taken from a renderer host / worker host that was identified by the
  // DevTools front-end based on the inspected page.
  static std::unique_ptr<DevToolsNetworkResourceLoader> Create(
      mojo::Remote<network::mojom::URLLoaderFactory> url_loader_factory,
      GURL gurl,
      const url::Origin& origin,
      net::SiteForCookies site_for_cookies,
      Caching caching,
      Credentials include_credentials,
      CompletionCallback complete_callback);
  ~DevToolsNetworkResourceLoader() override;
  // Disallow copy and assignment.
  DevToolsNetworkResourceLoader(const DevToolsNetworkResourceLoader&) = delete;
  DevToolsNetworkResourceLoader& operator=(
      const DevToolsNetworkResourceLoader&) = delete;
 private:
  DevToolsNetworkResourceLoader(
      network::ResourceRequest resource_request,
      const net::NetworkTrafficAnnotationTag& traffic_annotation,
      mojo::Remote<network::mojom::URLLoaderFactory> url_loader_factory,
      CompletionCallback complete_callback);
  // Starts downloading |resource_request_| as a stream via |loader_|.
  void DownloadAsStream();
  // network::SimpleURLLoaderStreamConsumer implementation:
  void OnDataReceived(base::StringPiece chunk,
                      base::OnceClosure resume) override;
  void OnComplete(bool success) override;
  void OnRetry(base::OnceClosure start_retry) override;
  const network::ResourceRequest resource_request_;
  const net::NetworkTrafficAnnotationTag traffic_annotation_;
  std::unique_ptr<network::SimpleURLLoader> loader_;
  mojo::Remote<network::mojom::URLLoaderFactory> url_loader_factory_;
  // Called once loading completes; consumed at that point.
  CompletionCallback completion_callback_;
  // Response body accumulated from streamed chunks.
  std::string content_;
};
} // namespace content
#endif // CONTENT_BROWSER_DEVTOOLS_PROTOCOL_DEVTOOLS_NETWORK_RESOURCE_LOADER_H_
| 1,179 |
5,169 | <gh_stars>1000+
{
"name": "KissPaginate",
"version": "0.3.8",
"summary": "Simplify your implementation of tableView paginate.",
"description": "```\nclass ViewController: PaginateViewController {\n @IBOutlet weak var noElementLabel: UILabel!\n\n override func viewDidLoad() {\n super.viewDidLoad()\n tableView.dataSource = self\n refreshElements()\n }\n\n override var getElementsClosure: (page: Int, successHandler: GetElementsSuccessHandler, failureHandler: (error: Error) -> Void) -> Void {\n return getElementList\n }\n\n func getElementList(page: Int, successHandler: GetElementsSuccessHandler, failureHandler: (error: Error) -> Void) {\n let elements = (0...20).map { \"page (page), element index\" + String($0) }\n delay(2) {\n successHandler(elements: elements, hasMoreElements: true)\n }\n }\n\n override func displayNoElementIfNeeded(noElement: Bool) {\n noElementLabel.hidden = !noElement\n }\n}\n\nextension ViewController: UITableViewDataSource {\n func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {\n return elements.count\n }\n\n func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {\n let cell = tableView.dequeueReusableCellWithIdentifier(\"Cell\")!\n let element = getElement(String.self, at: indexPath.row)\n cell.textLabel?.text = element\n if elements.count == indexPath.row + 1 {\n loadNextPage()\n }\n return cell\n }\n}\n```",
"homepage": "https://github.com/WANGjieJacques/KissPaginate",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/WANGjieJacques/KissPaginate.git",
"tag": "0.3.8"
},
"platforms": {
"ios": "8.0"
},
"source_files": "KissPaginate/Classes/**/*",
"pushed_with_swift_version": "3.0.1"
}
| 820 |
1,133 | <reponame>fromjss/comdb2
package com.bloomberg.comdb2.jdbc;
import java.sql.*;
import java.util.logging.*;
import org.junit.*;
import org.junit.Assert.*;
import java.util.*;
public class UnpooledDataSourceTest {
    /**
     * Verifies that connection-init SQL statements configured on an
     * {@code UnpooledDataSource} run on new connections, and that URL options
     * ({@code maxquerytime=1}) are honored. Requires a live comdb2 database
     * identified by the {@code cdb2jdbc.test.database} and
     * {@code cdb2jdbc.test.cluster} system properties.
     *
     * @throws SQLException on unexpected database errors
     */
    @Test public void testDatabaseInitSqls() throws SQLException {
        String db = System.getProperty("cdb2jdbc.test.database");
        String cluster = System.getProperty("cdb2jdbc.test.cluster");

        UnpooledDataSource ds = new UnpooledDataSource();
        ds.setDriver("com.bloomberg.comdb2.jdbc.Driver");
        ds.setUrl(String.format("jdbc:comdb2://%s/%s?maxquerytime=1", cluster, db));
        ds.setConnectionInitSqls(Arrays.asList("SET TIMEZONE Zulu"));

        /* try-with-resources so the connection/statement/result set are
           released even when an assertion fails mid-test. */
        try (Connection conn = ds.getConnection();
             Statement stmt = conn.createStatement()) {
            try (ResultSet rs = stmt.executeQuery("SELECT CAST(NOW() AS TEXT)")) {
                String zulu = rs.getString(1);
                Assert.assertTrue("Should get back a time in Zulu", zulu.contains("Zulu"));
            }

            /* Also test URL options in UnpooledDataSource: maxquerytime=1
               must abort this 5-second query. Previously the test silently
               passed when no exception was thrown; fail explicitly instead. */
            try {
                stmt.executeQuery("SELECT SLEEP(5)");
                Assert.fail("Query should have timed out due to maxquerytime=1");
            } catch (SQLException e) {
                Assert.assertTrue("Should exceed limit", e.getMessage().contains("query timed out"));
            }
        }

        /* De-register myself from the driver manager to not interfere with other tests.
           NOTE(review): the condition below deregisters the first driver that is NOT
           the comdb2 driver, which contradicts this comment -- confirm intent before
           changing; behavior is intentionally left as-is. */
        Enumeration<java.sql.Driver> drivers = DriverManager.getDrivers();
        while (drivers.hasMoreElements()) {
            java.sql.Driver driver = drivers.nextElement();
            if (!(driver instanceof com.bloomberg.comdb2.jdbc.Driver)) {
                DriverManager.deregisterDriver(driver);
                break;
            }
        }
    }
}
| 762 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-285r-jf89-jj3c",
"modified": "2022-05-02T03:19:32Z",
"published": "2022-05-02T03:19:32Z",
"aliases": [
"CVE-2009-0904"
],
"details": "The IBM Stax XMLStreamWriter in the Web Services component in IBM WebSphere Application Server (WAS) 6.1 before 172.16.17.32 does not properly process XML encoding, which allows remote attackers to bypass intended access restrictions and possibly modify data via \"XML fuzzing attacks\" sent through SOAP requests.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2009-0904"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/51490"
},
{
"type": "WEB",
"url": "http://www-01.ibm.com/support/docview.wss?uid=swg27007951"
},
{
"type": "WEB",
"url": "http://www-1.ibm.com/support/docview.wss?uid=swg1PK84015"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/35741"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 541 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.