text (string, lengths 2–1.04M) | meta (dict) |
---|---|
using Microsoft.Win32.SafeHandles;
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class Advapi32
{
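// Standard Win32 usage pattern (a general convention, not specific to this file):
// call once with a null TokenInformation buffer to learn the required size via
// ReturnLength, then call again with a buffer of that size.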
[DllImport(Interop.Libraries.Advapi32, SetLastError = true)]
internal static extern unsafe bool GetTokenInformation(
SafeAccessTokenHandle TokenHandle,
TOKEN_INFORMATION_CLASS TokenInformationClass,
void* TokenInformation,
uint TokenInformationLength,
out uint ReturnLength);
}
}
| {
"content_hash": "5b20709450f729bfb5ad9d051a883a3d",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 68,
"avg_line_length": 31.58823529411765,
"alnum_prop": 0.7039106145251397,
"repo_name": "wtgodbe/corefx",
"id": "4261dffa803434f443b8bc02cd690d5f98c8bdc8",
"size": "741",
"binary": false,
"copies": "32",
"ref": "refs/heads/master",
"path": "src/Common/src/Interop/Windows/Advapi32/Interop.GetTokenInformation_void.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "1C Enterprise",
"bytes": "280724"
},
{
"name": "ASP",
"bytes": "1687"
},
{
"name": "Batchfile",
"bytes": "11027"
},
{
"name": "C",
"bytes": "3803475"
},
{
"name": "C#",
"bytes": "181225579"
},
{
"name": "C++",
"bytes": "1521"
},
{
"name": "CMake",
"bytes": "79434"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "HTML",
"bytes": "653"
},
{
"name": "Makefile",
"bytes": "13780"
},
{
"name": "OpenEdge ABL",
"bytes": "137969"
},
{
"name": "Perl",
"bytes": "3895"
},
{
"name": "PowerShell",
"bytes": "192527"
},
{
"name": "Python",
"bytes": "1535"
},
{
"name": "Roff",
"bytes": "9422"
},
{
"name": "Shell",
"bytes": "131260"
},
{
"name": "TSQL",
"bytes": "96941"
},
{
"name": "Visual Basic",
"bytes": "2135715"
},
{
"name": "XSLT",
"bytes": "514720"
}
],
"symlink_target": ""
} |
Some basic Python scripts to check URLs.
| {
"content_hash": "12ac22293f454ab3c62be84639fe72cc",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 40,
"avg_line_length": 41,
"alnum_prop": 0.8048780487804879,
"repo_name": "sickboy83/python",
"id": "c52234ff2fa115690e7e7a594d3d898fcab70b53",
"size": "62",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1586"
}
],
"symlink_target": ""
} |
package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// ExpressRouteCircuitAuthorizationsClient is the network Client
type ExpressRouteCircuitAuthorizationsClient struct {
BaseClient
}
// NewExpressRouteCircuitAuthorizationsClient creates an instance of the ExpressRouteCircuitAuthorizationsClient
// client.
func NewExpressRouteCircuitAuthorizationsClient(subscriptionID string) ExpressRouteCircuitAuthorizationsClient {
return NewExpressRouteCircuitAuthorizationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewExpressRouteCircuitAuthorizationsClientWithBaseURI creates an instance of the
// ExpressRouteCircuitAuthorizationsClient client.
func NewExpressRouteCircuitAuthorizationsClientWithBaseURI(baseURI string, subscriptionID string) ExpressRouteCircuitAuthorizationsClient {
return ExpressRouteCircuitAuthorizationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates an authorization in the specified express route circuit.
// Parameters:
// resourceGroupName - the name of the resource group.
// circuitName - the name of the express route circuit.
// authorizationName - the name of the authorization.
// authorizationParameters - parameters supplied to the create or update express route circuit authorization
// operation.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string, authorizationParameters ExpressRouteCircuitAuthorization) (result ExpressRouteCircuitAuthorizationsCreateOrUpdateFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteCircuitAuthorizationsClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, circuitName, authorizationName, authorizationParameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string, authorizationParameters ExpressRouteCircuitAuthorization) (*http.Request, error) {
pathParameters := map[string]interface{}{
"authorizationName": autorest.Encode("path", authorizationName),
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
authorizationParameters.Etag = nil
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}", pathParameters),
autorest.WithJSON(authorizationParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdateSender(req *http.Request) (future ExpressRouteCircuitAuthorizationsCreateOrUpdateFuture, err error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdateResponder(resp *http.Response) (result ExpressRouteCircuitAuthorization, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified authorization from the specified express route circuit.
// Parameters:
// resourceGroupName - the name of the resource group.
// circuitName - the name of the express route circuit.
// authorizationName - the name of the authorization.
func (client ExpressRouteCircuitAuthorizationsClient) Delete(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (result ExpressRouteCircuitAuthorizationsDeleteFuture, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteCircuitAuthorizationsClient.Delete")
defer func() {
sc := -1
if result.Response() != nil {
sc = result.Response().StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, circuitName, authorizationName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client ExpressRouteCircuitAuthorizationsClient) DeletePreparer(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"authorizationName": autorest.Encode("path", authorizationName),
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) DeleteSender(req *http.Request) (future ExpressRouteCircuitAuthorizationsDeleteFuture, err error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
var resp *http.Response
resp, err = autorest.SendWithSender(client, req, sd...)
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets the specified authorization from the specified express route circuit.
// Parameters:
// resourceGroupName - the name of the resource group.
// circuitName - the name of the express route circuit.
// authorizationName - the name of the authorization.
func (client ExpressRouteCircuitAuthorizationsClient) Get(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (result ExpressRouteCircuitAuthorization, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteCircuitAuthorizationsClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, circuitName, authorizationName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client ExpressRouteCircuitAuthorizationsClient) GetPreparer(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"authorizationName": autorest.Encode("path", authorizationName),
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) GetSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) GetResponder(resp *http.Response) (result ExpressRouteCircuitAuthorization, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List gets all authorizations in an express route circuit.
// Parameters:
// resourceGroupName - the name of the resource group.
// circuitName - the name of the circuit.
func (client ExpressRouteCircuitAuthorizationsClient) List(ctx context.Context, resourceGroupName string, circuitName string) (result AuthorizationListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteCircuitAuthorizationsClient.List")
defer func() {
sc := -1
if result.alr.Response.Response != nil {
sc = result.alr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx, resourceGroupName, circuitName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.alr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "List", resp, "Failure sending request")
return
}
result.alr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client ExpressRouteCircuitAuthorizationsClient) ListPreparer(ctx context.Context, resourceGroupName string, circuitName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) ListSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), azure.DoRetryWithRegistration(client.Client))
return autorest.SendWithSender(client, req, sd...)
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) ListResponder(resp *http.Response) (result AuthorizationListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client ExpressRouteCircuitAuthorizationsClient) listNextResults(ctx context.Context, lastResults AuthorizationListResult) (result AuthorizationListResult, err error) {
req, err := lastResults.authorizationListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client ExpressRouteCircuitAuthorizationsClient) ListComplete(ctx context.Context, resourceGroupName string, circuitName string) (result AuthorizationListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ExpressRouteCircuitAuthorizationsClient.List")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.List(ctx, resourceGroupName, circuitName)
return
}
| {
"content_hash": "8a63f7278f0e00c5ce4bfce54a007b16",
"timestamp": "",
"source": "github",
"line_count": 399,
"max_line_length": 295,
"avg_line_length": 43.30827067669173,
"alnum_prop": 0.7818287037037037,
"repo_name": "sjug/origin",
"id": "81d3391fe6b094d8958da1eb585763f10bf93683",
"size": "17280",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "vendor/github.com/Azure/azure-sdk-for-go/services/network/mgmt/2017-09-01/network/expressroutecircuitauthorizations.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "921"
},
{
"name": "Dockerfile",
"bytes": "2240"
},
{
"name": "Go",
"bytes": "2353215"
},
{
"name": "Makefile",
"bytes": "6395"
},
{
"name": "Python",
"bytes": "14593"
},
{
"name": "Shell",
"bytes": "310377"
}
],
"symlink_target": ""
} |
using Bivi.Data.Context;
using Bivi.Domaine;
using Bivi.Business.Depots;
using System.Linq;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Data;
using Bivi.Domaine.Composite;
namespace Bivi.Data.Depots
{
public partial class DepotNiveauAbonnement : Depot<NiveauAbonnement>, IDepotNiveauAbonnement
{
private readonly IDepotNiveauAbonnementThematique _niveauAbonnementThematiques;
private readonly IDepotLien_NiveauAbonnement_PerimetreAbonnement _lien_NiveauAbonnement_PerimetreAbonnement;
public DepotNiveauAbonnement(IBiviDbContext dataContext,
IDepotLien_NiveauAbonnement_PerimetreAbonnement lien_NiveauAbonnement_PerimetreAbonnement,
IDepotNiveauAbonnementThematique niveauAbonnementThematiques)
: this(dataContext)
{
_niveauAbonnementThematiques = niveauAbonnementThematiques;
_lien_NiveauAbonnement_PerimetreAbonnement = lien_NiveauAbonnement_PerimetreAbonnement;
}
public IEnumerable<dynamic> Search(string code, string codeQualiac, string libelle, long? siteID)
{
List<SqlParameter> Parameters = new List<SqlParameter>();
Parameters.Add(new SqlParameter("@Code", SqlDbType.NVarChar) { Value = code });
Parameters.Add(new SqlParameter("@CodeQualiac", SqlDbType.NVarChar) { Value = codeQualiac });
Parameters.Add(new SqlParameter("@Libelle", SqlDbType.NVarChar) { Value = libelle });
if (siteID.HasValue)
{
Parameters.Add(new SqlParameter("@SiteID", SqlDbType.BigInt) { Value = siteID.Value });
}
return DbContext.ExecuteStoredProcedureDynamic("sp_SearchNiveauAbonnement", Parameters);
}
public NiveauAbonnement SaveInfosGenerales(NiveauAbonnementComposite composite)
{
NiveauAbonnement _na;
if (composite.ID > 0)
{
_na = this.GetById(composite.ID);
}
else
{
_na = new NiveauAbonnement();
}
_na.Code = composite.Code;
_na.CodeEchange = composite.CodeEchange;
_na.Description = composite.Description;
_na.Multi = composite.Multi;
_na.AvecNormes = composite.AvecNormes;
_na.SiteID = composite.SiteID;
#region NiveauAbonnementThematiques
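//Thematiques to delete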
var thematiquesBdd = _niveauAbonnementThematiques.Find(x => x.NiveauAbonnementID == composite.ID).ToList();
foreach (var d in thematiquesBdd.Where(x => !composite.Thematiques.Where(y => y.IsSelected).Select(z => z.ID).Contains(x.BlocID)))
{
_niveauAbonnementThematiques.Delete(d);
}
//Thematiques to insert
var thematiques = _niveauAbonnementThematiques.Find(x => x.NiveauAbonnementID == composite.ID).ToList();
var thematiquesToInsert = composite.Thematiques.Where(x => x.IsSelected && !thematiques.Select(y => y.BlocID).Contains(x.ID));
foreach (var thematique in thematiquesToInsert)
{
_niveauAbonnementThematiques.Add(
new NiveauAbonnementThematique()
{
BlocID = thematique.ID,
NiveauAbonnementID = composite.ID
});
}
#endregion
#region PerimetreAbonnements
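//Perimetres to delete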
var perimetresBdd = _lien_NiveauAbonnement_PerimetreAbonnement.Find(x => x.NiveauAbonnementID == composite.ID).ToList();
foreach (var d in perimetresBdd.Where(x => !composite.PerimetreAbonnements.Select(y => y.ID).Contains(x.ID)))
{
_lien_NiveauAbonnement_PerimetreAbonnement.Delete(d);
}
//Perimetres to insert
var perimetres = _lien_NiveauAbonnement_PerimetreAbonnement.Find(x => x.NiveauAbonnementID == composite.ID).ToList();
foreach (var perimetre in composite.PerimetreAbonnements)
{
var _perimetre = perimetres.Where(x => x.ID == perimetre.ID).FirstOrDefault();
if (_perimetre == null)
{
_lien_NiveauAbonnement_PerimetreAbonnement.Add(
new Lien_NiveauAbonnement_PerimetreAbonnement()
{
NiveauAbonnementID = composite.ID,
PerimetreAbonnementID = perimetre.PerimetreAbonnementID
});
}
else
{
_perimetre.PerimetreAbonnementID = perimetre.PerimetreAbonnementID;
_lien_NiveauAbonnement_PerimetreAbonnement.Update();
}
}
#endregion
if (composite.ID > 0)
{
this.Update();
}
else
{
this.Add(_na);
}
return _na;
}
}
}
| {
"content_hash": "8cf0854493de71a2cf7ab05a65fe17f1",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 142,
"avg_line_length": 39.46212121212121,
"alnum_prop": 0.5732386254559416,
"repo_name": "apo-j/Projects_Working",
"id": "8cfb6a882fd41931d53af57fddf9dc3edbd5ac57",
"size": "5209",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Bivi/src/Bivi.Common/Bivi.Data/Depots/DepotNiveauAbonnement.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "16118"
},
{
"name": "Batchfile",
"bytes": "1096"
},
{
"name": "C#",
"bytes": "27262375"
},
{
"name": "CSS",
"bytes": "8474090"
},
{
"name": "Groff",
"bytes": "2101703"
},
{
"name": "HTML",
"bytes": "4910101"
},
{
"name": "JavaScript",
"bytes": "20716565"
},
{
"name": "PHP",
"bytes": "9283"
},
{
"name": "XSLT",
"bytes": "930531"
}
],
"symlink_target": ""
} |
<?php
namespace Mage\Review\Test\Constraint;
use Magento\Mtf\Constraint\AbstractConstraint;
use Magento\Mtf\ObjectManager;
use Mage\Catalog\Test\Page\Product\CatalogProductView;
/**
* Assert that a product doesn't have a review on the product page.
*/
class AssertProductReviewIsAbsentOnProductPage extends AbstractConstraint
{
/* tags */
const SEVERITY = 'low';
/* end tags */
/**
* Verify message for assert.
*/
const NO_REVIEW_LINK_TEXT = 'Be the first to review this product';
/**
* Catalog product view page.
*
* @var CatalogProductView
*/
protected $catalogProductView;
/**
* @constructor
* @param ObjectManager $objectManager
* @param CatalogProductView $catalogProductView
*/
public function __construct(ObjectManager $objectManager, CatalogProductView $catalogProductView)
{
parent::__construct($objectManager);
$this->catalogProductView = $catalogProductView;
}
/**
* Assert that the product doesn't have a review on the product page.
*
* @return void
*/
public function processAssert()
{
$this->catalogProductView->getViewBlock()->openCustomInformationTab('Reviews');
\PHPUnit_Framework_Assert::assertFalse(
$this->catalogProductView->getReviewsBlock()->isVisibleReviewItems(),
'No reviews below the form required.'
);
\PHPUnit_Framework_Assert::assertEquals(
self::NO_REVIEW_LINK_TEXT,
trim($this->catalogProductView->getReviewsBlock()->getAddReviewLink()->getText()),
sprintf('"%s" link is not available', self::NO_REVIEW_LINK_TEXT)
);
}
/**
* Returns a string representation of the object.
*
* @return string
*/
public function toString()
{
return 'Product does not have a review on the product page.';
}
}
| {
"content_hash": "1306c9a9f3eca4cbf1810a8b6184dd8c",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 101,
"avg_line_length": 26.774647887323944,
"alnum_prop": 0.640189374013677,
"repo_name": "z-v/iboxGento2",
"id": "455e63f98228d3f0ede2a07cae42a074bf71925e",
"size": "2845",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "dev/tests/functional/tests/app/Mage/Review/Test/Constraint/AssertProductReviewIsAbsentOnProductPage.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "20018"
},
{
"name": "ApacheConf",
"bytes": "1187"
},
{
"name": "Batchfile",
"bytes": "1036"
},
{
"name": "CSS",
"bytes": "2077843"
},
{
"name": "HTML",
"bytes": "5840333"
},
{
"name": "JavaScript",
"bytes": "1563785"
},
{
"name": "PHP",
"bytes": "49596017"
},
{
"name": "PowerShell",
"bytes": "1028"
},
{
"name": "Ruby",
"bytes": "288"
},
{
"name": "Shell",
"bytes": "3849"
},
{
"name": "XSLT",
"bytes": "2066"
}
],
"symlink_target": ""
} |
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="40dp"
android:height="40dp"
android:viewportWidth="40"
android:viewportHeight="40"
android:tint="?attr/colorControlNormal">
<path
android:fillColor="@android:color/white"
android:pathData="M8.333,35V5H24.417L31.667,12.25V35ZM23.708,12.917H30.25L23.708,6.417Z"/>
</vector>
| {
"content_hash": "b73bc3102cecfb8c35d1b5d5427095f8",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 96,
"avg_line_length": 38.8,
"alnum_prop": 0.7139175257731959,
"repo_name": "google/material-design-icons",
"id": "f8601ef2d87fc7aded2b0f6b42f864d5c9eadead",
"size": "388",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "symbols/android/note/materialsymbolssharp/note_wght200fill1_40px.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package Furl::Request;
use strict;
use warnings;
use utf8;
use Class::Accessor::Lite;
use Furl::Headers;
use Furl::HTTP;
Class::Accessor::Lite->mk_accessors(qw/ method uri protocol headers content /);
sub new {
my $class = shift;
my ($method, $uri, $headers, $content) = @_;
unless (defined $headers) {
$headers = +{};
}
unless (defined $content) {
$content = '';
}
bless +{
method => $method,
uri => $uri,
headers => Furl::Headers->new($headers),
content => $content,
}, $class;
}
sub parse {
my $class = shift;
my $raw_request = shift;
# I didn't use HTTP::Parser::XS for the following reasons:
# 1. parse_http_request() omits the request content, but we need to handle it.
# 2. it parses headers into a PSGI env, and the env/header mapping is troublesome.
return unless $raw_request =~ s!^(.+) (.+) (HTTP/1.\d+)\s*!!;
my ($method, $uri, $protocol) = ($1, $2, $3);
my ($header_str, $content) = split /\015?\012\015?\012/, $raw_request, 2;
my $headers = +{};
for (split /\015?\012/, $header_str) {
tr/\015\012//d;
my ($k, $v) = split /\s*:\s*/, $_, 2;
$headers->{lc $k} = $v;
# complete host_port
if (lc $k eq 'host') {
$uri = $v . $uri;
}
}
unless ($uri =~ /^http/) {
$uri = "http://$uri";
}
my $req = $class->new($method, $uri, $headers, $content);
$req->protocol($protocol);
return $req;
}
# alias
*body = \&content;
# shorthand
sub content_length { shift->headers->content_length }
sub content_type { shift->headers->content_type }
sub header { shift->headers->header(@_) }
sub request_line {
my $self = shift;
my $path_query = $self->uri . ''; # for URI.pm
$path_query =~ s!^https?://[^/]+!!;
my $method = $self->method || '';
my $protocol = $self->protocol || '';
return "$method $path_query $protocol";
}
sub as_http_request {
my $self = shift;
require HTTP::Request;
my $req = HTTP::Request->new(
$self->method,
$self->uri,
[ $self->headers->flatten ],
$self->content,
);
$req->protocol($self->protocol);
return $req;
}
sub as_hashref {
my $self = shift;
return +{
method => $self->method,
uri => $self->uri,
protocol => $self->protocol,
headers => [ $self->headers->flatten ],
content => $self->content,
};
}
sub as_string {
my $self = shift;
join("\015\012",
$self->method . ' ' . $self->uri . (defined($self->protocol) ? ' ' . $self->protocol : ''),
$self->headers->as_string,
ref($self->content) =~ qr{\A(?:ARRAY|HASH)\z} ? Furl::HTTP->make_x_www_form_urlencoded($self->content) : $self->content,
);
}
1;
__END__
=head1 NAME
Furl::Request - Request object for Furl
=head1 SYNOPSIS
my $f = Furl->new;
my $req = Furl::Request->new($method, $uri, $headers, $content);
my $res = $f->request($req);
print $req->request_line, "\n";
my $http_req = $req->as_http_request;
my $req_hash = $req->as_hashref;
=head1 DESCRIPTION
This is an HTTP request object in Furl.
=head1 CONSTRUCTOR
my $req = Furl::Request->new($method, $uri);
# or
my $req = Furl::Request->new($method, $uri, \%headers);
# or
my $req = Furl::Request->new($method, $uri, \%headers, $content);
# and
my $req = Furl::Request->parse($http_request_raw_string);
=head1 INSTANCE METHODS
=over 4
=item $req->method($method)
Gets/Sets HTTP request method
=item $req->uri($uri)
Gets/Sets request URI
=item $req->headers($headers)
Gets/Sets instance of L<Furl::Headers>
=item $req->content($content)
=item $req->body($content)
Gets/Sets the request body as a scalar.
=item $req->protocol($protocol)
$req->protocol('HTTP/1.1');
print $req->protocol; #=> "HTTP/1.1"
Gets/Sets the HTTP protocol as a string.
=item $req->content_length
=item $req->content_type
=item $req->header
Shorthand to access L<Furl::Headers>.
=item $req->as_http_request
Makes an instance of L<HTTP::Request> from the L<Furl::Request>.
=item $req->as_hashref
Converts the request object to a HashRef.
The format is as follows:
method: Str
uri: Str
protocol: Str
headers: ArrayRef[Str]
content: Str
=item $req->request_line
print $req->request_line; #=> "GET / HTTP/1.1"
Returns the HTTP request line.
=back
| {
"content_hash": "b4f0c5192d221dff29277478be9f4902",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 128,
"avg_line_length": 20.74418604651163,
"alnum_prop": 0.5715246636771301,
"repo_name": "kayac/isucon3",
"id": "c03bd4ff0f132c17f366ea41077ed395f8d91c9d",
"size": "4460",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "final/misc/Furl-patched/lib/Furl/Request.pm",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "322"
},
{
"name": "C",
"bytes": "1152674"
},
{
"name": "C++",
"bytes": "793"
},
{
"name": "CSS",
"bytes": "13904"
},
{
"name": "Go",
"bytes": "1404390"
},
{
"name": "HTML",
"bytes": "18819"
},
{
"name": "JavaScript",
"bytes": "63928"
},
{
"name": "Nginx",
"bytes": "555"
},
{
"name": "PHP",
"bytes": "294775"
},
{
"name": "Perl",
"bytes": "4187313"
},
{
"name": "Perl6",
"bytes": "150887"
},
{
"name": "Python",
"bytes": "21264"
},
{
"name": "Ruby",
"bytes": "26993"
},
{
"name": "Shell",
"bytes": "5025"
},
{
"name": "XS",
"bytes": "123225"
}
],
"symlink_target": ""
} |
package gappsockets.common.socks;
import java.util.Arrays;
import gappsockets.common.util.Bytes;
import gappsockets.common.util.Shorts;
public final class UdpRequestHeader {
private static final class Params {
private int currentFragmentNumber;
private AddressType addressType;
private String desiredDestinationAddress;
private int desiredDestinationPort;
private int userDataStartIndex;
private byte[] byteArray;
}
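// Wire layout (SOCKS5 UDP request header, per RFC 1928), matching the
// index/length constants below:
// RSV(2) | FRAG(1) | ATYP(1) | DST.ADDR(variable) | DST.PORT(2) | DATA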
private static final int RSV = 0x0000;
private static final int RSV_INDEX = 0;
private static final int FRAG_INDEX = 2;
private static final int ATYP_INDEX = 3;
private static final int DST_ADDR_INDEX = 4;
private static final int RSV_LENGTH = 2;
private static final int FRAG_LENGTH = 1;
private static final int ATYP_LENGTH = 1;
private static final int MIN_DST_ADDR_LENGTH = 1;
private static final int MAX_DST_ADDR_LENGTH = 255;
private static final int DST_PORT_LENGTH = 2;
private static final int MIN_DATA_LENGTH = 0;
private static final int MIN_LENGTH = RSV_LENGTH + FRAG_LENGTH
+ ATYP_LENGTH + MIN_DST_ADDR_LENGTH + DST_PORT_LENGTH
+ MIN_DATA_LENGTH;
public static UdpRequestHeader newInstance(final byte[] b) {
if (b.length < MIN_LENGTH) {
throw new IllegalArgumentException(String.format(
"expected length must be at least %s. actual length is %s",
MIN_LENGTH,
b.length));
}
int frag = Bytes.unsigned(b[FRAG_INDEX]);
AddressType atyp = AddressType.valueOf(b[ATYP_INDEX]);
int dstAddrLength = atyp.getAddressLength(b[DST_ADDR_INDEX]);
String dstAddr = atyp.readAddress(Arrays.copyOfRange(
b, DST_ADDR_INDEX, DST_ADDR_INDEX + dstAddrLength));
int dstPort = Shorts.unsigned(Shorts.readShort(
b[DST_ADDR_INDEX + dstAddrLength],
b[DST_ADDR_INDEX + dstAddrLength + 1]));
int dataStartIndex = DST_ADDR_INDEX + dstAddrLength + DST_PORT_LENGTH;
Params params = new Params();
params.currentFragmentNumber = frag;
params.addressType = atyp;
params.desiredDestinationAddress = dstAddr;
params.desiredDestinationPort = dstPort;
params.userDataStartIndex = dataStartIndex;
params.byteArray = Arrays.copyOf(b, b.length);
return new UdpRequestHeader(params);
}
public static UdpRequestHeader newInstance(
final int currentFragmentNumber,
final AddressType addressType,
final String desiredDestinationAddress,
final int desiredDestinationPort,
final byte[] userData) {
if (currentFragmentNumber < Bytes.MIN_UNSIGNED_VALUE
|| currentFragmentNumber > Bytes.MAX_UNSIGNED_VALUE) {
throw new IllegalArgumentException(String.format(
"current fragment number must be no less than %s and no more than %s",
Bytes.MIN_UNSIGNED_VALUE,
Bytes.MAX_UNSIGNED_VALUE));
}
byte[] desiredDestinationAddressBytes = desiredDestinationAddress.getBytes();
if (desiredDestinationAddressBytes.length < MIN_DST_ADDR_LENGTH
|| desiredDestinationAddressBytes.length > MAX_DST_ADDR_LENGTH) {
throw new IllegalArgumentException(String.format(
"desired destination address must be no less than %s byte(s) and no more than %s byte(s)",
MIN_DST_ADDR_LENGTH,
MAX_DST_ADDR_LENGTH));
}
if (desiredDestinationPort < Shorts.MIN_UNSIGNED_VALUE
|| desiredDestinationPort > Shorts.MAX_UNSIGNED_VALUE) {
throw new IllegalArgumentException(String.format(
"desired destination port must be no less than %s and no more than %s",
Shorts.MIN_UNSIGNED_VALUE,
Shorts.MAX_UNSIGNED_VALUE));
}
byte[] dstAddr = addressType.writeAddress(desiredDestinationAddress);
byte[] dstPort = Shorts.writeShort((short) desiredDestinationPort);
byte[] b = new byte[RSV_LENGTH + FRAG_LENGTH + ATYP_LENGTH
+ dstAddr.length + dstPort.length + userData.length];
byte[] rsv = Shorts.writeShort((short) RSV);
for (int i = 0; i < rsv.length; i++) {
b[RSV_INDEX + i] = rsv[i];
}
b[FRAG_INDEX] = (byte) currentFragmentNumber;
b[ATYP_INDEX] = addressType.byteValue();
for (int i = 0; i < dstAddr.length; i++) {
b[DST_ADDR_INDEX + i] = dstAddr[i];
}
for (int i = 0; i < dstPort.length; i++) {
b[DST_ADDR_INDEX + dstAddr.length + i] = dstPort[i];
}
for (int i = 0; i < userData.length; i++) {
b[DST_ADDR_INDEX + dstAddr.length + dstPort.length + i] = userData[i];
}
Params params = new Params();
params.currentFragmentNumber = currentFragmentNumber;
params.addressType = addressType;
params.desiredDestinationAddress = desiredDestinationAddress;
params.desiredDestinationPort = desiredDestinationPort;
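// the 2 accounts for FRAG_LENGTH + ATYP_LENGTH between RSV and DST.ADDR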
params.userDataStartIndex = rsv.length + 2 + dstAddr.length + dstPort.length;
params.byteArray = b;
return new UdpRequestHeader(params);
}
private final int currentFragmentNumber;
private final AddressType addressType;
private final String desiredDestinationAddress;
private final int desiredDestinationPort;
private final int userDataStartIndex;
private final byte[] byteArray;
private UdpRequestHeader(final Params params) {
this.currentFragmentNumber = params.currentFragmentNumber;
this.addressType = params.addressType;
this.desiredDestinationAddress = params.desiredDestinationAddress;
this.desiredDestinationPort = params.desiredDestinationPort;
this.userDataStartIndex = params.userDataStartIndex;
this.byteArray = params.byteArray;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof UdpRequestHeader)) {
return false;
}
UdpRequestHeader other = (UdpRequestHeader) obj;
if (this.addressType != other.addressType) {
return false;
}
if (!Arrays.equals(this.byteArray, other.byteArray)) {
return false;
}
if (this.currentFragmentNumber != other.currentFragmentNumber) {
return false;
}
if (this.desiredDestinationAddress == null) {
if (other.desiredDestinationAddress != null) {
return false;
}
} else if (!this.desiredDestinationAddress.equals(other.desiredDestinationAddress)) {
return false;
}
if (this.desiredDestinationPort != other.desiredDestinationPort) {
return false;
}
if (this.userDataStartIndex != other.userDataStartIndex) {
return false;
}
return true;
}
public AddressType getAddressType() {
return this.addressType;
}
public int getCurrentFragmentNumber() {
return this.currentFragmentNumber;
}
public String getDesiredDestinationAddress() {
return this.desiredDestinationAddress;
}
public int getDesiredDestinationPort() {
return this.desiredDestinationPort;
}
public int getUserDataStartIndex() {
return this.userDataStartIndex;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.addressType == null) ? 0 : this.addressType.hashCode());
result = prime * result + Arrays.hashCode(this.byteArray);
result = prime * result + this.currentFragmentNumber;
result = prime * result + ((this.desiredDestinationAddress == null) ? 0 : this.desiredDestinationAddress.hashCode());
result = prime * result + this.desiredDestinationPort;
result = prime * result + this.userDataStartIndex;
return result;
}
public byte[] toByteArray() {
return Arrays.copyOf(this.byteArray, this.byteArray.length);
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append("UdpRequestHeader [currentFragmentNumber=")
.append(this.currentFragmentNumber)
.append(", addressType=")
.append(this.addressType)
.append(", desiredDestinationAddress=")
.append(this.desiredDestinationAddress)
.append(", desiredDestinationPort=")
.append(this.desiredDestinationPort)
.append(", userDataStartIndex=")
.append(this.userDataStartIndex)
.append("]");
return builder.toString();
}
}
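To make the byte layout above concrete, here is a minimal round-trip sketch; it is not part of the original file. It assumes AddressType (defined elsewhere in the package) exposes an IP_V4 constant and that its writeAddress/readAddress pair is symmetric for dotted-quad strings; the address and port values are purely illustrative.
package gappsockets.common.socks;
public final class UdpRequestHeaderDemo {
    public static void main(String[] args) {
        byte[] userData = { 0x68, 0x69 }; // payload bytes, here "hi"
        // Build a header: fragment number 0 marks a standalone datagram.
        UdpRequestHeader header = UdpRequestHeader.newInstance(
                0,
                AddressType.IP_V4, // assumed constant name; AddressType is not shown here
                "127.0.0.1",
                1080,
                userData);
        // Serialize, then parse the bytes back into an equal header.
        byte[] wire = header.toByteArray();
        UdpRequestHeader parsed = UdpRequestHeader.newInstance(wire);
        System.out.println(parsed);
        System.out.println("round-trip ok: " + header.equals(parsed));
    }
}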
| {
"content_hash": "c717b9195798d367ef6c8fb2fca0bd61",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 119,
"avg_line_length": 34.1140350877193,
"alnum_prop": 0.7288506042684495,
"repo_name": "jh3nd3rs0n/gappsockets.server",
"id": "80d051171cda51e599330451de5b161e663b2e65",
"size": "7778",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/gappsockets/common/socks/UdpRequestHeader.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "235973"
}
],
"symlink_target": ""
} |
import React from "react"
import { Link, graphql } from "gatsby"
import Bio from "../components/bio"
import Layout from "../components/layout"
import SEO from "../components/seo"
import { rhythm } from "../utils/typography"
class BlogIndex extends React.Component {
render() {
const { data } = this.props
const siteTitle = data.site.siteMetadata.title
const posts = data.allMarkdownRemark.edges
return (
<Layout location={this.props.location} title={siteTitle}>
<SEO title="All posts" />
<Bio />
{posts.map(({ node }) => {
const title = node.frontmatter.title || node.fields.slug
return (
<article key={node.fields.slug}>
<header>
<h3
style={{
marginBottom: rhythm(1 / 4),
}}
>
<Link
style={{ boxShadow: `none` }}
to={"blog/" + node.fields.slug}
>
{title}
</Link>
</h3>
<small>{node.frontmatter.date}</small>
</header>
<section>
<p
dangerouslySetInnerHTML={{
__html: node.frontmatter.description || node.excerpt,
}}
/>
</section>
</article>
)
})}
</Layout>
)
}
}
export default BlogIndex
export const pageQuery = graphql`
query {
site {
siteMetadata {
title
}
}
allMarkdownRemark(sort: { fields: [frontmatter___date], order: DESC }) {
edges {
node {
excerpt
fields {
slug
}
frontmatter {
date(formatString: "MMMM Do, YYYY")
title
description
}
}
}
}
}
`
| {
"content_hash": "b71d288ddeb9dac79f434c24b2761b05",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 76,
"avg_line_length": 24.67948717948718,
"alnum_prop": 0.45194805194805193,
"repo_name": "aashnisshah/aashnime",
"id": "d6d8015f573b587ff0ecc8c8e474e739ad182031",
"size": "1925",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pages/blog.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "76"
},
{
"name": "JavaScript",
"bytes": "23508"
},
{
"name": "Python",
"bytes": "2895"
}
],
"symlink_target": ""
} |
package org.elasticsearch.index.shard;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.engine.InternalEngine;
import org.elasticsearch.index.engine.InternalEngineTests.TranslogHandler;
import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPool.Cancellable;
import org.elasticsearch.threadpool.ThreadPool.Names;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
/**
* Tests how {@linkplain RefreshListeners} interacts with {@linkplain InternalEngine}.
*/
public class RefreshListenersTests extends ESTestCase {
private RefreshListeners listeners;
private Engine engine;
private volatile int maxListeners;
private ThreadPool threadPool;
private Store store;
@Before
public void setupListeners() throws Exception {
// Setup dependencies of the listeners
maxListeners = randomIntBetween(1, 1000);
listeners = new RefreshListeners(
() -> maxListeners,
() -> engine.refresh("too-many-listeners"),
// Immediately run listeners rather than adding them to the listener thread pool like IndexShard does to simplify the test.
Runnable::run,
logger
);
// Now setup the InternalEngine which is much more complicated because we aren't mocking anything
threadPool = new TestThreadPool(getTestName());
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY);
ShardId shardId = new ShardId(new Index("index", "_na_"), 1);
Directory directory = newDirectory();
DirectoryService directoryService = new DirectoryService(shardId, indexSettings) {
@Override
public Directory newDirectory() throws IOException {
return directory;
}
@Override
public long throttleTimeInNanos() {
return 0;
}
};
store = new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId));
IndexWriterConfig iwc = newIndexWriterConfig();
TranslogConfig translogConfig = new TranslogConfig(shardId, createTempDir("translog"), indexSettings,
BigArrays.NON_RECYCLING_INSTANCE);
Engine.EventListener eventListener = new Engine.EventListener() {
@Override
public void onFailedEngine(String reason, @Nullable Exception e) {
// we don't need to notify anybody in this test
}
};
EngineConfig config = new EngineConfig(EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, shardId, threadPool, indexSettings, null,
store, new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()), newMergePolicy(), iwc.getAnalyzer(),
iwc.getSimilarity(), new CodecService(null, logger), eventListener, new TranslogHandler(shardId.getIndexName(), logger),
IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig,
TimeValue.timeValueMinutes(5), listeners, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP);
engine = new InternalEngine(config);
listeners.setTranslog(engine.getTranslog());
}
@After
public void tearDownListeners() throws Exception {
IOUtils.close(engine, store);
terminate(threadPool);
}
public void testTooMany() throws Exception {
assertFalse(listeners.refreshNeeded());
Engine.Index index = index("1");
// Fill the listener slots
List<DummyRefreshListener> nonForcedListeners = new ArrayList<>(maxListeners);
for (int i = 0; i < maxListeners; i++) {
DummyRefreshListener listener = new DummyRefreshListener();
nonForcedListeners.add(listener);
listeners.addOrNotify(index.getTranslogLocation(), listener);
assertTrue(listeners.refreshNeeded());
}
// We shouldn't have called any of them
for (DummyRefreshListener listener : nonForcedListeners) {
assertNull("Called listener too early!", listener.forcedRefresh.get());
}
// Add one more listener which should cause a refresh.
DummyRefreshListener forcingListener = new DummyRefreshListener();
listeners.addOrNotify(index.getTranslogLocation(), forcingListener);
assertTrue("Forced listener wasn't forced?", forcingListener.forcedRefresh.get());
forcingListener.assertNoError();
// That forces all the listeners through. It would be on the listener ThreadPool but we've made all of those execute immediately.
for (DummyRefreshListener listener : nonForcedListeners) {
assertEquals("Expected listener called with unforced refresh!", Boolean.FALSE, listener.forcedRefresh.get());
listener.assertNoError();
}
assertFalse(listeners.refreshNeeded());
}
public void testAfterRefresh() throws Exception {
Engine.Index index = index("1");
engine.refresh("I said so");
if (randomBoolean()) {
index(randomFrom("1" /* same document */, "2" /* different document */));
if (randomBoolean()) {
engine.refresh("I said so");
}
}
DummyRefreshListener listener = new DummyRefreshListener();
assertTrue(listeners.addOrNotify(index.getTranslogLocation(), listener));
assertFalse(listener.forcedRefresh.get());
listener.assertNoError();
}
/**
* Attempts to add a listener at the same time as a refresh occurs by having a background thread force a refresh as fast as it can while
* adding listeners. This can catch the situation where a refresh happens right as the listener is being added such that the listener
* misses the refresh and has to catch the next one. If the listener wasn't able to properly catch the next one then this would fail.
*/
public void testConcurrentRefresh() throws Exception {
AtomicBoolean run = new AtomicBoolean(true);
Thread refresher = new Thread(() -> {
while (run.get()) {
engine.refresh("test");
}
});
refresher.start();
try {
for (int i = 0; i < 1000; i++) {
Engine.Index index = index("1");
DummyRefreshListener listener = new DummyRefreshListener();
boolean immediate = listeners.addOrNotify(index.getTranslogLocation(), listener);
if (immediate) {
assertNotNull(listener.forcedRefresh.get());
} else {
assertBusy(() -> assertNotNull(listener.forcedRefresh.get()));
}
assertFalse(listener.forcedRefresh.get());
listener.assertNoError();
}
} finally {
run.set(false);
refresher.join();
}
}
/**
* Uses a bunch of threads to index, wait for refresh, and non-realtime get documents to validate that they are visible after waiting
* regardless of what crazy sequence of events causes the refresh listener to fire.
*/
public void testLotsOfThreads() throws Exception {
int threadCount = between(3, 10);
maxListeners = between(1, threadCount * 2);
// This thread just refreshes every once in a while to cause trouble.
Cancellable refresher = threadPool.scheduleWithFixedDelay(() -> engine.refresh("because test"), timeValueMillis(100), Names.SAME);
// These threads add and block until the refresh makes the change visible and then do a non-realtime get.
Thread[] indexers = new Thread[threadCount];
for (int thread = 0; thread < threadCount; thread++) {
final String threadId = String.format(Locale.ROOT, "%04d", thread);
indexers[thread] = new Thread(() -> {
for (int iteration = 1; iteration <= 50; iteration++) {
try {
String testFieldValue = String.format(Locale.ROOT, "%s%04d", threadId, iteration);
Engine.Index index = index(threadId, testFieldValue);
assertEquals(iteration, index.version());
DummyRefreshListener listener = new DummyRefreshListener();
listeners.addOrNotify(index.getTranslogLocation(), listener);
assertBusy(() -> assertNotNull("listener never called", listener.forcedRefresh.get()));
if (threadCount < maxListeners) {
assertFalse(listener.forcedRefresh.get());
}
listener.assertNoError();
Engine.Get get = new Engine.Get(false, index.uid());
try (Engine.GetResult getResult = engine.get(get)) {
assertTrue("document not found", getResult.exists());
assertEquals(iteration, getResult.version());
SingleFieldsVisitor visitor = new SingleFieldsVisitor("test");
getResult.docIdAndVersion().context.reader().document(getResult.docIdAndVersion().docId, visitor);
assertEquals(Arrays.asList(testFieldValue), visitor.fields().get("test"));
}
} catch (Exception t) {
throw new RuntimeException("failure on the [" + iteration + "] iteration of thread [" + threadId + "]", t);
}
}
});
indexers[thread].start();
}
for (Thread indexer: indexers) {
indexer.join();
}
refresher.cancel();
}
private Engine.Index index(String id) {
return index(id, "test");
}
private Engine.Index index(String id, String testFieldValue) {
String type = "test";
String uid = type + ":" + id;
Document document = new Document();
document.add(new TextField("test", testFieldValue, Field.Store.YES));
Field uidField = new Field("_uid", type + ":" + id, UidFieldMapper.Defaults.FIELD_TYPE);
Field versionField = new NumericDocValuesField("_version", Versions.MATCH_ANY);
document.add(uidField);
document.add(versionField);
BytesReference source = new BytesArray(new byte[] { 1 });
ParsedDocument doc = new ParsedDocument(versionField, id, type, null, -1, -1, Arrays.asList(document), source, null);
Engine.Index index = new Engine.Index(new Term("_uid", uid), doc);
engine.index(index);
return index;
}
private static class DummyRefreshListener implements Consumer<Boolean> {
/**
* When the listener is called, this captures its only argument.
*/
AtomicReference<Boolean> forcedRefresh = new AtomicReference<>();
private volatile Exception error;
@Override
public void accept(Boolean forcedRefresh) {
try {
assertNotNull(forcedRefresh);
Boolean oldValue = this.forcedRefresh.getAndSet(forcedRefresh);
assertNull("Listener called twice", oldValue);
} catch (Exception e) {
error = e;
}
}
public void assertNoError() {
if (error != null) {
throw new RuntimeException(error);
}
}
}
}
| {
"content_hash": "5c2433add0e3f7b07aa9e7c562cd3677",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 140,
"avg_line_length": 45.754208754208754,
"alnum_prop": 0.6517771727132239,
"repo_name": "liweinan0423/elasticsearch",
"id": "05147d4a72a172c149849df75bbdb1e927113805",
"size": "14377",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "10561"
},
{
"name": "Batchfile",
"bytes": "13128"
},
{
"name": "Emacs Lisp",
"bytes": "3341"
},
{
"name": "FreeMarker",
"bytes": "45"
},
{
"name": "Groovy",
"bytes": "269282"
},
{
"name": "HTML",
"bytes": "3397"
},
{
"name": "Java",
"bytes": "38679871"
},
{
"name": "Perl",
"bytes": "7271"
},
{
"name": "Python",
"bytes": "52711"
},
{
"name": "Shell",
"bytes": "108256"
}
],
"symlink_target": ""
} |
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace YanZhiwei.DotNet.QQWry.Utilities.Tests
{
[TestClass()]
public class QQWryLocatorTests
{
[TestMethod()]
public void QueryTest()
{
QQWryLocator _qqWry = new QQWryLocator(@"D:\OneDrive\软件\开发\qqwry\qqwry.dat");
IPLocation _ip = _qqWry.Query("116.226.81.32");
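// The assertion below expects "上海市 电信" (Shanghai Telecom) for this IP.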
Assert.AreEqual("上海市 电信", string.Format("{0} {1}", _ip.Country, _ip.Local));
}
}
} | {
"content_hash": "7b2b9256cd4f70bbaf16e51f6091a53f",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 89,
"avg_line_length": 30.3125,
"alnum_prop": 0.6061855670103092,
"repo_name": "YanZhiwei/DotNet.Utilities",
"id": "6cf6afa4c8724b403c2d56d6bbcdd95ccd7c4f77",
"size": "505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "YanZhiwei.DotNet.QQWry.UtilitiesTests/QQWryLocatorTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "29999"
},
{
"name": "Batchfile",
"bytes": "316"
},
{
"name": "C#",
"bytes": "3623746"
},
{
"name": "CSS",
"bytes": "2452"
},
{
"name": "HTML",
"bytes": "912"
},
{
"name": "JavaScript",
"bytes": "307517"
},
{
"name": "PHP",
"bytes": "834"
},
{
"name": "PLpgSQL",
"bytes": "525"
}
],
"symlink_target": ""
} |
/*
* Open source project
*
* By: Welliton Fernandes Leal
* Github: https://github.com/wellfernandes
*
* Network Protocol Verification System
* by means of model-based testing
*
*/
package br.edu.ifpr.londrina.controller;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import javax.swing.JOptionPane;
import br.edu.ifpr.londrina.model.FinalStateMachine;
import br.edu.ifpr.londrina.model.InputTrace;
import br.edu.ifpr.londrina.model.InputXML;
import br.edu.ifpr.londrina.model.StateModel;
import br.edu.ifpr.londrina.model.StateTransition;
import br.edu.ifpr.londrina.model.XMLRead;
import br.edu.ifpr.londrina.view.AboutDeveloper;
import br.edu.ifpr.londrina.view.AboutSoftware;
import br.edu.ifpr.londrina.view.PrincipalView;
public class Controller{
private PrincipalView principalView;
private FinalStateMachine fsm;
private AboutSoftware aboutSoft;
private AboutDeveloper aboutDev;
private String varChoice = null;
private String currentEvent;
private String targetState;
private String lastState;
private String lastEvent;
private String waitingEvent;
//List about test report - txt File
private static LinkedList<String> testReport = new LinkedList<String>();
String tempEvent;
private InputXML inputXML;
XMLRead xmlRead;
//Controller Initialize view
public void initializeView(){
principalView = new PrincipalView();
}
//Controller view - Close/Cancel/AboutSoftware/AboutDeveloper
public void viewOperation(String choice){
if(choice.equals("close")){
//Close the Software
System.out.println("Closing - Do you would like to save this project?");
int dell = JOptionPane.showConfirmDialog(null, "Do you would like to save this project?");
if(dell == JOptionPane.CANCEL_OPTION){
System.out.println("Canceled - Would like not close the project");
}else if(dell == JOptionPane.OK_OPTION){
System.out.println("Yes - Would like to save the project");
JOptionPane.showMessageDialog(null, "Not implemented yet");
}
else if(dell == JOptionPane.NO_OPTION){
System.out.println("Save project: No.");
}
System.exit(1);
}
else if(choice.equals("aboutSoftware")){
//About Soft. View
aboutSoft = new AboutSoftware();
}
else if(choice.equals("aboutDeveloper")){
//About Develop. View
aboutDev = new AboutDeveloper();
}
else{
//Shown if the view raises an unexpected error
int dell = JOptionPane.showConfirmDialog(null, "Sorry about that, there is an error in your view.");
if(dell == JOptionPane.CANCEL_OPTION){
System.out.println("Canceled");
}else if(dell == JOptionPane.OK_OPTION){
System.out.println("Ok");
}
else{
System.out.println("default");
}
}
}
//Controller Import Trace file
public boolean importTraceFile(){
System.out.println("Importing File...");
InputTrace inputTraceFile = new InputTrace();
if(inputTraceFile.loadFile() == true){
fsm = FinalStateMachine.getInstance();
fsm.traceSplit(InputTrace.getTraceList());
return true;
}else{
return false;
}
}
//Controller Open XML file
public boolean openProject(){
System.out.println("Open XML File...");
inputXML = new InputXML();
if(inputXML.loadXML() == true){
XMLRead statesXML = new XMLRead();
statesXML.parsing(inputXML.getFileChooserXML().getSelectedFile().getPath());
return true;
}else{
return false;
}
}
//Controller Save Project XML
public void saveProject(){
System.out.println("Saving the project...");
JOptionPane.showMessageDialog(null, "Not implemented...");
}
//Controller Save As
public void saveAsProject(){
System.out.println("Saving as...");
JOptionPane.showMessageDialog(null, "Not implemented...");
}
//Controller Verify Modeling VS Trace
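//Walks every event of the imported trace through the FSM model: for each event it
//looks up the current state's transitions, follows the matching one to its target
//state and appends the step to the test report. Returns true only when every trace
//event was consumed by a valid transition.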
public boolean verifyModeling(String currentState){
int lines = 0;
fsm = FinalStateMachine.getInstance();
ArrayList<String> event = fsm.getArrayEvents();
int indexEvent=0;
for (String e : event) {
currentEvent = event.get(indexEvent);
List<StateModel> stateList = fsm.getStatesList();
for (StateModel states : stateList) {
List<StateTransition> transitionTemp = states.getTransactionsList();
if(states.getState_name().equalsIgnoreCase((currentState)) && !states.getTransactionsList().isEmpty()){
//Last state 1
lastState = currentState;
System.out.println("\n\nCURRENT STATE: "+currentState);
System.out.println("Transitions: "+states.getTransactionsList().size());
//Check transitions of state
int countT =1;
for (StateTransition transition : transitionTemp) {
if(transition.getTransitionEvent().equalsIgnoreCase((currentEvent))){
System.out.println("CURRENT EVENT: "+ currentEvent);
waitingEvent = currentEvent;
//Target - Next State
targetState = transition.getTarget();
System.out.println("TARGET STATE: "+targetState+ "\n" );
//Add to Test Report - FOR TXT FILE
testReport.add("\nCurrent State: "+currentState+ "\n"+"Transitions: "+ states.getTransactionsList().size()+"\n"
+"Current event: "+currentEvent+"\n"+"Target State: "+ targetState+"\nTrace line: "+lines+"\n\n");
currentState = targetState;
System.out.println("TRACE LINE: "+lines);
lines++;
break;
}
else if(transition.getTransitionEvent().isEmpty() || transition.getTarget().equals(null)){
return false;
}
if(!transition.getTransitionEvent().equalsIgnoreCase((currentEvent)) && countT == transitionTemp.size()){
System.out.println("CURRENT EVENT: "+ currentEvent);
waitingEvent = currentEvent;
return false;
}
countT++;
}
//LastState 2
lastState = currentState;
lastEvent = currentEvent;
}
currentState = targetState;
//END RUN FINAL STATE MACHINE
if(lines == fsm.getArrayEvents().size()){
return true;
}
//NOTE: e.length() is never negative, so this branch is unreachable dead code
if(e.length() == -1){
return true;
}
}
indexEvent++;
}
return false;
}
//Controller clear trace file imported
public boolean clearFileImported(){
//Cleaning the trace list imported
if(InputTrace.getTraceList().size() != 0){
InputTrace.getTraceList().clear();
return true;
}
else if(InputTrace.getTraceList().size() == 0){
return false;
}
else{
JOptionPane.showMessageDialog(null, "Sorry! Cant to clear the file.\nInternal error!");
return false;
}
}
/**--------------------------------------------------------------
*----------------------GETTERS AND SETTERS----------------------
*---------------------------------------------------------------
**/
//Get VarChoice
public String getVarChoice() {
return varChoice;
}
//Set VarChoice
public void setVarChoice(String varChoice) {
this.varChoice = varChoice;
}
//Get PrincipalView
public PrincipalView getPrincipalView() {
return principalView;
}
//Set PrincipalView
public void setPrincipalView(PrincipalView principalView) {
this.principalView = principalView;
}
//get AboutSoftware
public AboutSoftware getAboutSoft() {
return aboutSoft;
}
//Set AboutSoftware
public void setAboutSoft(AboutSoftware aboutSoft) {
this.aboutSoft = aboutSoft;
}
//Get AboutDeveloper
public AboutDeveloper getAboutDev() {
return aboutDev;
}
//Set AboutDev
public void setAboutDev(AboutDeveloper aboutDev) {
this.aboutDev = aboutDev;
}
//Get XMLRead
public XMLRead getXmlRead() {
return xmlRead;
}
//Set XMLRead
public void setXmlRead(XMLRead xmlRead) {
this.xmlRead = xmlRead;
}
//Get LastState
public String getLastState() {
return lastState;
}
//Set LastState
public void setLastState(String lastState) {
this.lastState = lastState;
}
//Get LastEvent
public String getLastEvent() {
return lastEvent;
}
//Set LastEvent
public void setLastEvent(String lastEvent) {
this.lastEvent = lastEvent;
}
//Get WaitingEvent
public String getWaitingEvent() {
return waitingEvent;
}
//Set WaitingEvent
public void setWaitingEvent(String waitingEvent) {
this.waitingEvent = waitingEvent;
}
//Get CurrentEvent
public String getCurrentEvent() {
return currentEvent;
}
//Set CurrentEvent
public void setCurrentEvent(String currentEvent) {
this.currentEvent = currentEvent;
}
//Get TargetState
public String getTargetState() {
return targetState;
}
//Set TargetState
public void setTargetState(String targetState) {
this.targetState = targetState;
}
//Get LinkedList testReport
public static LinkedList<String> getTestReport() {
return testReport;
}
//Set LinkedList testReport
public static void setTestReport(LinkedList<String> testReport) {
Controller.testReport = testReport;
}
//Get TempEvent
public String getTempEvent() {
return tempEvent;
}
//Set TempEvent
public void setTempEvent(String tempEvent) {
this.tempEvent = tempEvent;
}
//Get InputXML
public InputXML getInputXML() {
return inputXML;
}
//Set InputXML
public void setInputXML(InputXML inputXML) {
this.inputXML = inputXML;
}
}
| {
"content_hash": "a6944d70e2fc08fec4c80a083249f90c",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 120,
"avg_line_length": 25.151761517615178,
"alnum_prop": 0.6767589699385842,
"repo_name": "wellfernandes/University",
"id": "9c58de1081b47c58c11789596b5fd8cdb71df7cb",
"size": "9281",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "MSRP/src/br/edu/ifpr/londrina/controller/Controller.java",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "56053"
},
{
"name": "Tcl",
"bytes": "43209"
}
],
"symlink_target": ""
} |
package io.druid.query.lookup.namespace;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.druid.guice.annotations.Json;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.UOE;
import io.druid.java.util.common.parsers.CSVParser;
import io.druid.java.util.common.parsers.DelimitedParser;
import io.druid.java.util.common.parsers.JSONParser;
import io.druid.java.util.common.parsers.Parser;
import org.joda.time.Period;
import javax.annotation.Nullable;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
/**
 * An {@link ExtractionNamespace} that loads flat key/value data from a single URI, or from
 * the most recently modified file matching fileRegex under a uriPrefix, re-polling on the
 * configured pollPeriod.
 */
@JsonTypeName("uri")
public class UriExtractionNamespace implements ExtractionNamespace
{
@JsonProperty
private final URI uri;
@JsonProperty
private final URI uriPrefix;
@JsonProperty
private final FlatDataParser namespaceParseSpec;
@JsonProperty
private final String fileRegex;
@JsonProperty
private final Period pollPeriod;
@JsonCreator
public UriExtractionNamespace(
@JsonProperty(value = "uri", required = false)
URI uri,
@JsonProperty(value = "uriPrefix", required = false)
URI uriPrefix,
@JsonProperty(value = "fileRegex", required = false)
String fileRegex,
@JsonProperty(value = "namespaceParseSpec", required = true)
FlatDataParser namespaceParseSpec,
@Min(0) @Nullable @JsonProperty(value = "pollPeriod", required = false)
Period pollPeriod,
@Deprecated
@JsonProperty(value = "versionRegex", required = false)
String versionRegex
)
{
this.uri = uri;
this.uriPrefix = uriPrefix;
if ((uri != null) == (uriPrefix != null)) {
throw new IAE("Either uri xor uriPrefix required");
}
this.namespaceParseSpec = Preconditions.checkNotNull(namespaceParseSpec, "namespaceParseSpec");
this.pollPeriod = pollPeriod == null ? Period.ZERO : pollPeriod;
this.fileRegex = fileRegex == null ? versionRegex : fileRegex;
if (fileRegex != null && versionRegex != null) {
throw new IAE("Cannot specify both versionRegex and fileRegex. versionRegex is deprecated");
}
if (uri != null && this.fileRegex != null) {
throw new IAE("Cannot define both uri and fileRegex");
}
if (this.fileRegex != null) {
try {
Pattern.compile(this.fileRegex);
}
catch (PatternSyntaxException ex) {
throw new IAE(ex, "Could not parse `fileRegex` [%s]", this.fileRegex);
}
}
}
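  // Example spec this class can deserialize (hypothetical values):
  //   { "type": "uri", "uri": "s3://bucket/lookup.json",
  //     "namespaceParseSpec": { "format": "simpleJson" }, "pollPeriod": "PT5M" }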
public String getFileRegex()
{
return fileRegex;
}
public FlatDataParser getNamespaceParseSpec()
{
return this.namespaceParseSpec;
}
public URI getUri()
{
return uri;
}
public URI getUriPrefix()
{
return uriPrefix;
}
@Override
public long getPollMs()
{
return pollPeriod.toStandardDuration().getMillis();
}
@Override
public String toString()
{
return "UriExtractionNamespace{" +
"uri=" + uri +
", uriPrefix=" + uriPrefix +
", namespaceParseSpec=" + namespaceParseSpec +
", fileRegex='" + fileRegex + '\'' +
", pollPeriod=" + pollPeriod +
'}';
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UriExtractionNamespace that = (UriExtractionNamespace) o;
if (getUri() != null ? !getUri().equals(that.getUri()) : that.getUri() != null) {
return false;
}
if (getUriPrefix() != null ? !getUriPrefix().equals(that.getUriPrefix()) : that.getUriPrefix() != null) {
return false;
}
if (!getNamespaceParseSpec().equals(that.getNamespaceParseSpec())) {
return false;
}
if (getFileRegex() != null ? !getFileRegex().equals(that.getFileRegex()) : that.getFileRegex() != null) {
return false;
}
return pollPeriod.equals(that.pollPeriod);
}
@Override
public int hashCode()
{
int result = getUri() != null ? getUri().hashCode() : 0;
result = 31 * result + (getUriPrefix() != null ? getUriPrefix().hashCode() : 0);
result = 31 * result + getNamespaceParseSpec().hashCode();
result = 31 * result + (getFileRegex() != null ? getFileRegex().hashCode() : 0);
result = 31 * result + pollPeriod.hashCode();
return result;
}
private static class DelegateParser implements Parser<String, String>
{
private final Parser<String, Object> delegate;
private final String key;
private final String value;
private DelegateParser(
Parser<String, Object> delegate,
@NotNull String key,
@NotNull String value
)
{
this.delegate = delegate;
this.key = key;
this.value = value;
}
@Override
public Map<String, String> parse(String input)
{
final Map<String, Object> inner = delegate.parse(input);
final String k = Preconditions.checkNotNull(
inner.get(key),
"Key column [%s] missing data in line [%s]",
key,
input
).toString(); // Just in case it is not a String (e.g. a Long)
final Object val = inner.get(value);
if (val == null) {
// Skip null or missing values, treat them as if there were no row at all.
return ImmutableMap.of();
}
return ImmutableMap.of(k, val.toString());
}
@Override
public void setFieldNames(Iterable<String> fieldNames)
{
delegate.setFieldNames(fieldNames);
}
@Override
public List<String> getFieldNames()
{
return delegate.getFieldNames();
}
}
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format")
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "csv", value = CSVFlatDataParser.class),
@JsonSubTypes.Type(name = "tsv", value = TSVFlatDataParser.class),
@JsonSubTypes.Type(name = "customJson", value = JSONFlatDataParser.class),
@JsonSubTypes.Type(name = "simpleJson", value = ObjectMapperFlatDataParser.class)
})
public static interface FlatDataParser
{
Parser<String, String> getParser();
}
@JsonTypeName("csv")
public static class CSVFlatDataParser implements FlatDataParser
{
private final Parser<String, String> parser;
private final List<String> columns;
private final String keyColumn;
private final String valueColumn;
@JsonCreator
public CSVFlatDataParser(
@JsonProperty("columns") List<String> columns,
@JsonProperty("keyColumn") final String keyColumn,
@JsonProperty("valueColumn") final String valueColumn,
@JsonProperty("hasHeaderRow") boolean hasHeaderRow,
@JsonProperty("skipHeaderRows") int skipHeaderRows
)
{
Preconditions.checkArgument(
Preconditions.checkNotNull(columns, "`columns` list required").size() > 1,
"Must specify more than one column to have a key value pair"
);
Preconditions.checkArgument(
!(Strings.isNullOrEmpty(keyColumn) ^ Strings.isNullOrEmpty(valueColumn)),
"Must specify both `keyColumn` and `valueColumn` or neither `keyColumn` nor `valueColumn`"
);
this.columns = columns;
this.keyColumn = Strings.isNullOrEmpty(keyColumn) ? columns.get(0) : keyColumn;
this.valueColumn = Strings.isNullOrEmpty(valueColumn) ? columns.get(1) : valueColumn;
Preconditions.checkArgument(
columns.contains(this.keyColumn),
"Column [%s] not found int columns: %s",
this.keyColumn,
Arrays.toString(columns.toArray())
);
Preconditions.checkArgument(
columns.contains(this.valueColumn),
"Column [%s] not found int columns: %s",
this.valueColumn,
Arrays.toString(columns.toArray())
);
this.parser = new DelegateParser(
new CSVParser(null, columns, hasHeaderRow, skipHeaderRows),
this.keyColumn,
this.valueColumn
);
}
@VisibleForTesting
CSVFlatDataParser(
List<String> columns,
String keyColumn,
String valueColumn
)
{
this(columns, keyColumn, valueColumn, false, 0);
}
@JsonProperty
public List<String> getColumns()
{
return columns;
}
@JsonProperty
public String getKeyColumn()
{
return this.keyColumn;
}
@JsonProperty
public String getValueColumn()
{
return this.valueColumn;
}
@Override
public Parser<String, String> getParser()
{
return parser;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
CSVFlatDataParser that = (CSVFlatDataParser) o;
if (!getColumns().equals(that.getColumns())) {
return false;
}
if (!getKeyColumn().equals(that.getKeyColumn())) {
return false;
}
return getValueColumn().equals(that.getValueColumn());
}
@Override
public String toString()
{
return StringUtils.format(
"CSVFlatDataParser = { columns = %s, keyColumn = %s, valueColumn = %s }",
Arrays.toString(columns.toArray()),
keyColumn,
valueColumn
);
}
}
@JsonTypeName("tsv")
public static class TSVFlatDataParser implements FlatDataParser
{
private final Parser<String, String> parser;
private final List<String> columns;
private final String delimiter;
private final String listDelimiter;
private final String keyColumn;
private final String valueColumn;
@JsonCreator
public TSVFlatDataParser(
@JsonProperty("columns") List<String> columns,
@JsonProperty("delimiter") String delimiter,
@JsonProperty("listDelimiter") String listDelimiter,
@JsonProperty("keyColumn") final String keyColumn,
@JsonProperty("valueColumn") final String valueColumn,
@JsonProperty("hasHeaderRow") boolean hasHeaderRow,
@JsonProperty("skipHeaderRows") int skipHeaderRows
)
{
Preconditions.checkArgument(
Preconditions.checkNotNull(columns, "`columns` list required").size() > 1,
"Must specify more than one column to have a key value pair"
);
final DelimitedParser delegate = new DelimitedParser(
Strings.emptyToNull(delimiter),
Strings.emptyToNull(listDelimiter),
hasHeaderRow,
skipHeaderRows
);
Preconditions.checkArgument(
!(Strings.isNullOrEmpty(keyColumn) ^ Strings.isNullOrEmpty(valueColumn)),
"Must specify both `keyColumn` and `valueColumn` or neither `keyColumn` nor `valueColumn`"
);
delegate.setFieldNames(columns);
this.columns = columns;
this.delimiter = delimiter;
this.listDelimiter = listDelimiter;
this.keyColumn = Strings.isNullOrEmpty(keyColumn) ? columns.get(0) : keyColumn;
this.valueColumn = Strings.isNullOrEmpty(valueColumn) ? columns.get(1) : valueColumn;
Preconditions.checkArgument(
columns.contains(this.keyColumn),
"Column [%s] not found int columns: %s",
this.keyColumn,
Arrays.toString(columns.toArray())
);
Preconditions.checkArgument(
columns.contains(this.valueColumn),
"Column [%s] not found int columns: %s",
this.valueColumn,
Arrays.toString(columns.toArray())
);
this.parser = new DelegateParser(delegate, this.keyColumn, this.valueColumn);
}
@VisibleForTesting
TSVFlatDataParser(
List<String> columns,
String delimiter,
String listDelimiter,
String keyColumn,
String valueColumn
)
{
this(columns, delimiter, listDelimiter, keyColumn, valueColumn, false, 0);
}
@JsonProperty
public List<String> getColumns()
{
return columns;
}
@JsonProperty
public String getKeyColumn()
{
return this.keyColumn;
}
@JsonProperty
public String getValueColumn()
{
return this.valueColumn;
}
@JsonProperty
public String getListDelimiter()
{
return listDelimiter;
}
@JsonProperty
public String getDelimiter()
{
return delimiter;
}
@Override
public Parser<String, String> getParser()
{
return parser;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TSVFlatDataParser that = (TSVFlatDataParser) o;
if (!getColumns().equals(that.getColumns())) {
return false;
}
if (getDelimiter() == null ? that.getDelimiter() != null : !getDelimiter().equals(that.getDelimiter())) {
return false;
}
if (!getKeyColumn().equals(that.getKeyColumn())) {
return false;
}
return getValueColumn().equals(that.getValueColumn());
}
@Override
public String toString()
{
return StringUtils.format(
"TSVFlatDataParser = { columns = %s, delimiter = '%s', listDelimiter = '%s',keyColumn = %s, valueColumn = %s }",
Arrays.toString(columns.toArray()),
delimiter,
listDelimiter,
keyColumn,
valueColumn
);
}
}
@JsonTypeName("customJson")
public static class JSONFlatDataParser implements FlatDataParser
{
private final Parser<String, String> parser;
private final String keyFieldName;
private final String valueFieldName;
@JsonCreator
public JSONFlatDataParser(
@JacksonInject @Json ObjectMapper jsonMapper,
@JsonProperty("keyFieldName") final String keyFieldName,
@JsonProperty("valueFieldName") final String valueFieldName
)
{
Preconditions.checkArgument(!Strings.isNullOrEmpty(keyFieldName), "[keyFieldName] cannot be empty");
Preconditions.checkArgument(!Strings.isNullOrEmpty(valueFieldName), "[valueFieldName] cannot be empty");
this.keyFieldName = keyFieldName;
this.valueFieldName = valueFieldName;
// Copy jsonMapper; don't want to share canonicalization tables, etc., with the global ObjectMapper.
this.parser = new DelegateParser(
new JSONParser(jsonMapper.copy(), ImmutableList.of(keyFieldName, valueFieldName)),
keyFieldName,
valueFieldName
);
}
@JsonProperty
public String getKeyFieldName()
{
return this.keyFieldName;
}
@JsonProperty
public String getValueFieldName()
{
return this.valueFieldName;
}
@Override
public Parser<String, String> getParser()
{
return this.parser;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JSONFlatDataParser that = (JSONFlatDataParser) o;
if (!getKeyFieldName().equals(that.getKeyFieldName())) {
return false;
}
return getValueFieldName().equals(that.getValueFieldName());
}
@Override
public String toString()
{
return StringUtils.format(
"JSONFlatDataParser = { keyFieldName = %s, valueFieldName = %s }",
keyFieldName,
valueFieldName
);
}
}
@JsonTypeName("simpleJson")
public static class ObjectMapperFlatDataParser implements FlatDataParser
{
private static final TypeReference<Map<String, String>> MAP_STRING_STRING = new TypeReference<Map<String, String>>()
{
};
private final Parser<String, String> parser;
@JsonCreator
public ObjectMapperFlatDataParser(
final @JacksonInject @Json ObjectMapper jsonMapper
)
{
// There's no point canonicalizing field names, we expect them to all be unique.
final JsonFactory jsonFactory = jsonMapper.getFactory().copy();
jsonFactory.configure(JsonFactory.Feature.CANONICALIZE_FIELD_NAMES, false);
parser = new Parser<String, String>()
{
@Override
public Map<String, String> parse(String input)
{
try {
return jsonFactory.createParser(input).readValueAs(MAP_STRING_STRING);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
}
@Override
public void setFieldNames(Iterable<String> fieldNames)
{
throw new UOE("No field names available");
}
@Override
public List<String> getFieldNames()
{
throw new UOE("No field names available");
}
};
}
@Override
public Parser<String, String> getParser()
{
return parser;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return true;
}
@Override
public String toString()
{
return "ObjectMapperFlatDataParser = { }";
}
}
}
| {
"content_hash": "01d95339c4ca4eb4e34a2e4784c97a91",
"timestamp": "",
"source": "github",
"line_count": 644,
"max_line_length": 122,
"avg_line_length": 28.09472049689441,
"alnum_prop": 0.6412424694633284,
"repo_name": "solimant/druid",
"id": "445237d2f3116a0f4228f0c473c921d6c6fc16aa",
"size": "18898",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1406"
},
{
"name": "CSS",
"bytes": "11623"
},
{
"name": "HTML",
"bytes": "26739"
},
{
"name": "Java",
"bytes": "16545418"
},
{
"name": "JavaScript",
"bytes": "295150"
},
{
"name": "Makefile",
"bytes": "659"
},
{
"name": "PostScript",
"bytes": "5"
},
{
"name": "Protocol Buffer",
"bytes": "729"
},
{
"name": "R",
"bytes": "17002"
},
{
"name": "Roff",
"bytes": "3617"
},
{
"name": "Shell",
"bytes": "4892"
},
{
"name": "TeX",
"bytes": "399444"
},
{
"name": "Thrift",
"bytes": "199"
}
],
"symlink_target": ""
} |
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-accessanalyzer/types'
require_relative 'aws-sdk-accessanalyzer/client_api'
require_relative 'aws-sdk-accessanalyzer/plugins/endpoints.rb'
require_relative 'aws-sdk-accessanalyzer/client'
require_relative 'aws-sdk-accessanalyzer/errors'
require_relative 'aws-sdk-accessanalyzer/resource'
require_relative 'aws-sdk-accessanalyzer/endpoint_parameters'
require_relative 'aws-sdk-accessanalyzer/endpoint_provider'
require_relative 'aws-sdk-accessanalyzer/endpoints'
require_relative 'aws-sdk-accessanalyzer/customizations'
# This module provides support for Access Analyzer. This module is available in the
# `aws-sdk-accessanalyzer` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# access_analyzer = Aws::AccessAnalyzer::Client.new
# resp = access_analyzer.apply_archive_rule(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from Access Analyzer are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::AccessAnalyzer::Errors::ServiceError
# # rescues all Access Analyzer API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::AccessAnalyzer
GEM_VERSION = '1.31.0'
end
| {
"content_hash": "8cf9537f00819a2fb913b6210fe0015b",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 83,
"avg_line_length": 30.382978723404257,
"alnum_prop": 0.7598039215686274,
"repo_name": "aws/aws-sdk-ruby",
"id": "562f359729b6b38338c848d9fe825d9d15fda489",
"size": "1671",
"binary": false,
"copies": "1",
"ref": "refs/heads/version-3",
"path": "gems/aws-sdk-accessanalyzer/lib/aws-sdk-accessanalyzer.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Gherkin",
"bytes": "109092"
},
{
"name": "HTML",
"bytes": "1253"
},
{
"name": "JavaScript",
"bytes": "10266"
},
{
"name": "Mustache",
"bytes": "51225"
},
{
"name": "Ruby",
"bytes": "183956484"
}
],
"symlink_target": ""
} |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2017.11.30 at 08:24:17 PM JST
//
package eu.datex2.schema._2_0rc1._2_0;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for Mobility complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Mobility">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="mobilityType" type="{http://datex2.eu/schema/2_0RC1/2_0}MobilityEnum"/>
* <element name="mobilityExtension" type="{http://datex2.eu/schema/2_0RC1/2_0}ExtensionType" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Mobility", propOrder = {
"mobilityType",
"mobilityExtension"
})
public class Mobility {
@XmlElement(required = true)
@XmlSchemaType(name = "string")
protected MobilityEnum mobilityType;
protected ExtensionType mobilityExtension;
/**
* Gets the value of the mobilityType property.
*
* @return
* possible object is
* {@link MobilityEnum }
*
*/
public MobilityEnum getMobilityType() {
return mobilityType;
}
/**
* Sets the value of the mobilityType property.
*
* @param value
* allowed object is
* {@link MobilityEnum }
*
*/
public void setMobilityType(MobilityEnum value) {
this.mobilityType = value;
}
/**
* Gets the value of the mobilityExtension property.
*
* @return
* possible object is
* {@link ExtensionType }
*
*/
public ExtensionType getMobilityExtension() {
return mobilityExtension;
}
/**
* Sets the value of the mobilityExtension property.
*
* @param value
* allowed object is
* {@link ExtensionType }
*
*/
public void setMobilityExtension(ExtensionType value) {
this.mobilityExtension = value;
}
}
| {
"content_hash": "d28f262e62a7a34dce0240b2cdd0bf24",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 122,
"avg_line_length": 27.051020408163264,
"alnum_prop": 0.6363636363636364,
"repo_name": "laidig/siri-20-java",
"id": "74ca2bc279788cd6c6efec7b0c72f989fdf276bf",
"size": "2651",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/eu/datex2/schema/_2_0rc1/_2_0/Mobility.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "4902289"
}
],
"symlink_target": ""
} |
cask 'deploymate' do
version '1.3.0'
sha256 'efaa7419c90041420a16fb222bc8fe534302ce245cb958f2eddcb8fdb6682f81'
# amazonaws.com is the official download host per the vendor homepage
url "https://s3.amazonaws.com/deploymate/download/Deploymate#{version.delete('.')}.zip"
appcast 'http://www.deploymateapp.com/deploymate-updates.xml',
checkpoint: 'e50a4e4b51c525f30452d09033e669c2c5c78817997fc4d81ebaa77be1ea7cd5'
name 'Deploymate'
homepage 'http://www.deploymateapp.com'
license :commercial
app 'Deploymate.app'
zap delete: [
'~/Library/Caches/com.ivanvasic.deploymate',
'~/Library/Preferences/com.ivanvasic.deploymate.plist',
'~/Library/Preferences/com.ivanvasic.deploymate.LSSharedFileList.plist',
]
end
| {
"content_hash": "532d494f9c6c01c306090ae41da2ac78",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 89,
"avg_line_length": 39.95,
"alnum_prop": 0.718397997496871,
"repo_name": "tedbundyjr/homebrew-cask",
"id": "9507db5ccf8282e013556b9872503fb82950313b",
"size": "799",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Casks/deploymate.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Ruby",
"bytes": "1876116"
},
{
"name": "Shell",
"bytes": "62036"
}
],
"symlink_target": ""
} |
package parser.expr.objectcreationexpr;
import java.io.Serializable;
public class AnonymousClassMulti {
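// Demonstrates two distinct anonymous implementations of Serializable, each overriding toString().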
public static String main() {
Serializable sr = new Serializable () {
@Override
public String toString() {
return "sr";
}
};
Serializable sr2 = new Serializable () {
@Override
public String toString() {
return "dd";
}
};
return sr.toString() + sr2.toString();
}
} | {
"content_hash": "fbfe71d08cc3f7c92a9f0bff5e7e8a0e",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 48,
"avg_line_length": 23.727272727272727,
"alnum_prop": 0.5153256704980843,
"repo_name": "arguslab/Argus-SAF",
"id": "730a29f38339613b4511b4d144b02dcb5294de77",
"size": "882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jawa/src/test/resources/java/parser/expr/objectcreationexpr/AnonymousClassMulti.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1919"
},
{
"name": "Batchfile",
"bytes": "2289"
},
{
"name": "Java",
"bytes": "201169"
},
{
"name": "Makefile",
"bytes": "702"
},
{
"name": "Python",
"bytes": "301479"
},
{
"name": "Scala",
"bytes": "2333283"
},
{
"name": "Shell",
"bytes": "14609"
}
],
"symlink_target": ""
} |
// Appenders should implement an interface
// And the type of the <report> object passed to the append method should be documented in a bean
// For now, we only have one appender anyway
/**
* Appender that will output the report as a JSON string inside a hidden DIV so that browser drivers such as Selenium
* can retrieve it
*/
Aria.classDefinition({
$classpath : "aria.tester.runner.appenders.JsonTextDivAppender",
$dependencies : ['aria.utils.Dom', 'aria.utils.Json'],
$statics : {
REPORT_DIV_ID : 'testReport'
},
$prototype : {
_destroyReportDiv : function () {
var div = aria.utils.Dom.getElementById(this.REPORT_DIV_ID);
if (div) {
div.parentNode.removeChild(div);
}
},
_createReportDiv : function (content) {
var div = aria.utils.Dom.getElementById(this.REPORT_DIV_ID);
if (!div) {
var document = Aria.$window.document;
div = document.createElement("DIV");
div.id = this.REPORT_DIV_ID;
div.style.display = "none";
div.innerHTML = content;
document.body.appendChild(div);
}
},
/**
* Append the report
* @param {Object} report
*/
append : function (report) {
this._destroyReportDiv();
this._createReportDiv(aria.utils.Json.convertToJsonString(report));
}
}
});
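// Example usage (hypothetical report shape), e.g. from a test runner:
//   var appender = Aria.getClassInstance("aria.tester.runner.appenders.JsonTextDivAppender");
//   appender.append({ testsCount : 12, failures : [] });
// Selenium (or any browser driver) can then read the JSON back from the hidden #testReport DIV.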
| {
"content_hash": "9f704778fae5c0f8667e3efa422436cb",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 117,
"avg_line_length": 35.72093023255814,
"alnum_prop": 0.5533854166666666,
"repo_name": "flongo/ariatemplates",
"id": "870fee55a1beeaf44e034773fc9500e16b5b1055",
"size": "2142",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/aria/tester/runner/appenders/JsonTextDivAppender.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "166147"
},
{
"name": "JavaScript",
"bytes": "8013161"
}
],
"symlink_target": ""
} |
<div class="commune_descr limited">
<p>
Saint-Thurien est
un village
localisé dans le département de Finistère en Bretagne. Elle comptait 869 habitants en 2008.</p>
<p>Si vous envisagez de demenager à Saint-Thurien, vous pourrez aisément trouver une maison à acheter. </p>
<p>À coté de Saint-Thurien sont situées les villes de
<a href="{{VLROOT}}/immobilier/querrien_29230/">Querrien</a> localisée à 6 km, 1 659 habitants,
<a href="{{VLROOT}}/immobilier/loctudy_29135/">Loctudy</a> localisée à 11 km, 4 045 habitants,
<a href="{{VLROOT}}/immobilier/scaer_29274/">Scaër</a> localisée à 9 km, 5 123 habitants,
<a href="{{VLROOT}}/immobilier/lanvenegen_56105/">Lanvénégen</a> localisée à 7 km, 1 184 habitants,
<a href="{{VLROOT}}/immobilier/trevoux_29300/">Le Trévoux</a> à 7 km, 1 368 habitants,
<a href="{{VLROOT}}/immobilier/mellac_29147/">Mellac</a> située à 7 km, 2 537 habitants,
entre autres. De plus, Saint-Thurien est située à seulement 29 km de <a href="{{VLROOT}}/immobilier/lanester_56098/">Lanester</a>.</p>
<p>La ville offre quelques équipements, elle propose entre autres un terrain de tennis, un terrain de sport et une boucle de randonnée.</p>
<p>Le nombre de logements, à Saint-Thurien, était réparti en 2011 en 26 appartements et 528 maisons soit
un marché plutôt équilibré.</p>
<p>À Saint-Thurien, le prix moyen à la vente d'un appartement se situe à 2 226 € du m² en vente. Le prix moyen d'une maison à l'achat se situe à 1 398 € du m². À la location le prix moyen se situe à 5,81 € du m² mensuel.</p>
</div>
| {
"content_hash": "2ded64b3f9ed9a1e25686b3ec105ddfd",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 249,
"avg_line_length": 86.42105263157895,
"alnum_prop": 0.7326431181485993,
"repo_name": "donaldinou/frontend",
"id": "0ad044ebf16382937e41b6be93c8139a749497e7",
"size": "1687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Viteloge/CoreBundle/Resources/descriptions/29269.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3073"
},
{
"name": "CSS",
"bytes": "111338"
},
{
"name": "HTML",
"bytes": "58634405"
},
{
"name": "JavaScript",
"bytes": "88564"
},
{
"name": "PHP",
"bytes": "841919"
}
],
"symlink_target": ""
} |
<?xml version="1.0"?>
<project name="Build MagicTheGatheringService" basedir="." default="dist">
<description>
A build file to generate a distributable .jar for the RealTimeWeb MagicTheGatheringService.
</description>
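<!-- Run "ant" (default target: dist) to produce the distributable jars under dist/ -->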
<property name="src" location="src"/>
<property name="bin" location="bin"/>
<property name="dist" location="dist"/>
<property name="docs" location="docs"/>
<property name="ver" value="0.1"/>
<!-- <propertyfile file=".properties">
<entry key="docs" value="docs/" />
</propertyfile>-->
<path id="classpath">
<fileset dir="libs" includes="**/*.jar" />
</path>
<target name="init">
<!-- Create the build directory structure used by compile -->
<mkdir dir="${bin}"/>
<!-- Create the doc directory structure used by compile -->
<mkdir dir="${docs}"/>
</target>
<target name="compile" depends="init" description="Compile the source" >
<!-- Compile the java code from ${src} into ${build} -->
<javac srcdir="${src}" destdir="${bin}" classpathref="classpath"/>
<!-- Copy all the non-.java files to ${build} -->
<copy todir="bin">
<fileset dir="src" excludes="**/*.java" />
</copy>
<!-- Compile Javadoc -->
<javadoc
use='true' author='true' version='true'
overview='index.html'
access='package'
sourcepath='src'
packagenames='*.*'
destdir='${docs}'
windowtitle='MagicTheGatheringService: version ${ver}'
noqualifier='java.*:javax.*:com.sun.*'
linksource='true'
/>
</target>
<target name="dist" depends="compile" description="Generate the Distribution files" >
<!-- Create the distribution directory -->
<mkdir dir="${dist}"/>
<!-- Put everything in ${build} into the MagicTheGatheringService-${DSTAMP}.jar file -->
<jar jarfile="${dist}/MagicTheGatheringService-${ver}.jar" basedir="${bin}">
<fileset dir="bin" includes="bin/*.class" />
<zipgroupfileset dir="libs/" includes="*.jar" />
</jar>
<!-- Put everything in ${build} and ${docs} into the MagicTheGatheringService-doc-${DSTAMP}.jar file -->
<jar jarfile="${dist}/MagicTheGatheringService-doc-${ver}.jar" basedir="${bin}">
<fileset dir="bin" includes="bin/*.class" />
<fileset dir="docs" includes="**" />
<zipgroupfileset dir="libs/" includes="*.jar" />
</jar>
<!-- Put everything in ${build} and ${src} into the MagicTheGatheringService-src-${DSTAMP}.jar file -->
<jar jarfile="${dist}/MagicTheGatheringService-src-${ver}.jar" basedir="${bin}">
<fileset dir="bin" includes="bin/*.class" />
<fileset dir="src" includes="**" />
<zipgroupfileset dir="libs/" includes="*.jar" />
</jar>
</target>
<target name="clean" description="Clean up the files" >
<!-- Delete the ${build} and ${dist} directory trees -->
<delete dir="${bin}"/>
<delete dir="${dist}"/>
</target>
</project> | {
"content_hash": "33d4bbc8d4c996d803f65eef23245795",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 106,
"avg_line_length": 36.59493670886076,
"alnum_prop": 0.6191629194050502,
"repo_name": "RealTimeWeb/MagicTheGatheringService",
"id": "576977ae6a6ca2b5b49deccd22c6b95db6f4ac04",
"size": "2891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/build.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "36850"
},
{
"name": "Python",
"bytes": "15662"
},
{
"name": "Racket",
"bytes": "6499"
}
],
"symlink_target": ""
} |
using Microsoft.EntityFrameworkCore.Migrations;
using System;
using System.Collections.Generic;
namespace GithubTrendingVisualizer.Web.Migrations
{
public partial class InitialCreate : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
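// Creates the initial schema: Repository and Users tables, each keyed by a GUID Id.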
migrationBuilder.CreateTable(
name: "Repository",
columns: table => new
{
Id = table.Column<Guid>(type: "uniqueidentifier", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Repository", x => x.Id);
});
migrationBuilder.CreateTable(
name: "Users",
columns: table => new
{
Id = table.Column<Guid>(type: "uniqueidentifier", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Users", x => x.Id);
});
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "Repository");
migrationBuilder.DropTable(
name: "Users");
}
}
}
| {
"content_hash": "4f6f1ba55b92acfc2a2e6a3f4bbd824c",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 86,
"avg_line_length": 30.302325581395348,
"alnum_prop": 0.49884881043745205,
"repo_name": "marceloatg/GithubTrendingVisualizer",
"id": "6f22523839c0811cc7ac2e3c8171ca90d67635a6",
"size": "1305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GithubTrendingVisualizer/Migrations/20170918011347_InitialCreate.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "44502"
},
{
"name": "CSS",
"bytes": "603879"
},
{
"name": "HTML",
"bytes": "136565"
},
{
"name": "JavaScript",
"bytes": "959299"
}
],
"symlink_target": ""
} |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.config;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.testing.EqualsTester;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.analysis.config.PatchTransition;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsClassProvider;
import java.util.Map;
import javax.annotation.Nullable;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for the ConfigFeatureFlagTransitionFactory. */
@RunWith(JUnit4.class)
public final class ConfigFeatureFlagTransitionFactoryTest extends BuildViewTestCase {
private static final class ConfigFeatureFlagsOptionsProvider implements OptionsClassProvider {
private final ImmutableMap<Label, String> flagValues;
public ConfigFeatureFlagsOptionsProvider(Map<Label, String> flagValues) {
this.flagValues = ImmutableMap.copyOf(flagValues);
}
@Override
@Nullable
public <O extends OptionsBase> O getOptions(Class<O> optionsClass) {
if (optionsClass.equals(ConfigFeatureFlagConfiguration.Options.class)) {
ConfigFeatureFlagConfiguration.Options options =
(ConfigFeatureFlagConfiguration.Options)
new ConfigFeatureFlagConfiguration.Options().getDefault();
options.replaceFlagValues(flagValues);
return optionsClass.cast(options);
}
return null;
}
}
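  // Convenience builders for BuildOptions with and without the feature flag fragment present.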
private static BuildOptions getOptionsWithoutFlagFragment() {
return BuildOptions.of(
ImmutableList.<Class<? extends FragmentOptions>>of(), OptionsClassProvider.EMPTY);
}
private static BuildOptions getOptionsWithFlagFragment(Map<Label, String> values) {
return BuildOptions.of(
ImmutableList.<Class<? extends FragmentOptions>>of(
ConfigFeatureFlagConfiguration.Options.class),
new ConfigFeatureFlagsOptionsProvider(values));
}
@Override
protected ConfiguredRuleClassProvider getRuleClassProvider() {
ConfiguredRuleClassProvider.Builder builder =
new ConfiguredRuleClassProvider.Builder().addRuleDefinition(new FeatureFlagSetterRule());
TestRuleClassProvider.addStandardRules(builder);
return builder.build();
}
@Test
public void emptyTransition_returnsOriginalOptionsIfFragmentNotPresent() throws Exception {
Rule rule = scratchRule("a", "empty", "feature_flag_setter(name = 'empty', flag_values = {})");
PatchTransition transition =
new ConfigFeatureFlagTransitionFactory("flag_values").buildTransitionFor(rule);
BuildOptions original = getOptionsWithoutFlagFragment();
BuildOptions converted = transition.apply(original);
assertThat(converted).isSameAs(original);
assertThat(original.contains(ConfigFeatureFlagConfiguration.Options.class)).isFalse();
}
@Test
public void populatedTransition_returnsOriginalOptionsIfFragmentNotPresent() throws Exception {
Rule rule =
scratchRule(
"a",
"flag_setter_a",
"feature_flag_setter(",
" name = 'flag_setter_a',",
" flag_values = {':flag': 'a'})",
"config_feature_flag(",
" name = 'flag',",
" allowed_values = ['a', 'b'],",
" default_value = 'a')");
PatchTransition transition =
new ConfigFeatureFlagTransitionFactory("flag_values").buildTransitionFor(rule);
BuildOptions original = getOptionsWithoutFlagFragment();
BuildOptions converted = transition.apply(original);
assertThat(converted).isSameAs(original);
assertThat(original.contains(ConfigFeatureFlagConfiguration.Options.class)).isFalse();
}
@Test
public void emptyTransition_returnsClearedOptionsIfFragmentPresent() throws Exception {
Rule rule = scratchRule("a", "empty", "feature_flag_setter(name = 'empty', flag_values = {})");
PatchTransition transition =
new ConfigFeatureFlagTransitionFactory("flag_values").buildTransitionFor(rule);
Map<Label, String> originalFlagMap = ImmutableMap.of(Label.parseAbsolute("//a:flag"), "value");
BuildOptions original = getOptionsWithFlagFragment(originalFlagMap);
BuildOptions converted = transition.apply(original);
assertThat(converted).isNotSameAs(original);
assertThat(original.get(ConfigFeatureFlagConfiguration.Options.class).getFlagValues())
.containsExactlyEntriesIn(originalFlagMap);
assertThat(converted.get(ConfigFeatureFlagConfiguration.Options.class).getFlagValues())
.isEmpty();
}
@Test
public void populatedTransition_setsOptionsAndClearsNonPresentOptionsIfFragmentPresent()
throws Exception {
Rule rule =
scratchRule(
"a",
"flag_setter_a",
"feature_flag_setter(",
" name = 'flag_setter_a',",
" flag_values = {':flag': 'a'})",
"config_feature_flag(",
" name = 'flag',",
" allowed_values = ['a', 'b'],",
" default_value = 'a')");
PatchTransition transition =
new ConfigFeatureFlagTransitionFactory("flag_values").buildTransitionFor(rule);
Map<Label, String> originalFlagMap = ImmutableMap.of(Label.parseAbsolute("//a:old"), "value");
Map<Label, String> expectedFlagMap = ImmutableMap.of(Label.parseAbsolute("//a:flag"), "a");
BuildOptions original = getOptionsWithFlagFragment(originalFlagMap);
BuildOptions converted = transition.apply(original);
assertThat(converted).isNotSameAs(original);
assertThat(original.get(ConfigFeatureFlagConfiguration.Options.class).getFlagValues())
.containsExactlyEntriesIn(originalFlagMap);
assertThat(converted.get(ConfigFeatureFlagConfiguration.Options.class).getFlagValues())
.containsExactlyEntriesIn(expectedFlagMap);
}
@Test
public void transition_equalsTester() throws Exception {
scratch.file(
"a/BUILD",
"feature_flag_setter(",
" name = 'empty',",
" flag_values = {})",
"feature_flag_setter(",
" name = 'empty2',",
" flag_values = {})",
"feature_flag_setter(",
" name = 'flag_setter_a',",
" flag_values = {':flag': 'a'})",
"feature_flag_setter(",
" name = 'flag_setter_a2',",
" flag_values = {':flag': 'a'})",
"feature_flag_setter(",
" name = 'flag_setter_b',",
" flag_values = {':flag': 'b'})",
"feature_flag_setter(",
" name = 'flag2_setter',",
" flag_values = {':flag2': 'a'})",
"feature_flag_setter(",
" name = 'both_setter',",
" flag_values = {':flag': 'a', ':flag2': 'a'})",
"config_feature_flag(",
" name = 'flag',",
" allowed_values = ['a', 'b'],",
" default_value = 'a')",
"config_feature_flag(",
" name = 'flag2',",
" allowed_values = ['a', 'b'],",
" default_value = 'a')");
Rule empty = (Rule) getTarget("//a:empty");
Rule empty2 = (Rule) getTarget("//a:empty2");
Rule flagSetterA = (Rule) getTarget("//a:flag_setter_a");
Rule flagSetterA2 = (Rule) getTarget("//a:flag_setter_a2");
Rule flagSetterB = (Rule) getTarget("//a:flag_setter_b");
Rule flag2Setter = (Rule) getTarget("//a:flag2_setter");
Rule bothSetter = (Rule) getTarget("//a:both_setter");
ConfigFeatureFlagTransitionFactory factory =
new ConfigFeatureFlagTransitionFactory("flag_values");
ConfigFeatureFlagTransitionFactory factory2 =
new ConfigFeatureFlagTransitionFactory("flag_values");
new EqualsTester()
.addEqualityGroup(
// transition with empty map
factory.buildTransitionFor(empty),
// transition produced by same factory on same rule
factory.buildTransitionFor(empty),
// transition produced by similar factory on same rule
factory2.buildTransitionFor(empty),
// transition produced by same factory on similar rule
factory.buildTransitionFor(empty2),
// transition produced by similar factory on similar rule
factory2.buildTransitionFor(empty2))
.addEqualityGroup(
// transition with flag -> a
factory.buildTransitionFor(flagSetterA),
// same map, different rule
factory.buildTransitionFor(flagSetterA2),
// same map, different factory
factory2.buildTransitionFor(flagSetterA))
.addEqualityGroup(
// transition with flag set to different value
factory.buildTransitionFor(flagSetterB))
.addEqualityGroup(
// transition with different flag set to same value
factory.buildTransitionFor(flag2Setter))
.addEqualityGroup(
// transition with more flags set
factory.buildTransitionFor(bothSetter))
.testEquals();
}
@Test
public void factory_equalsTester() throws Exception {
new EqualsTester()
.addEqualityGroup(
new ConfigFeatureFlagTransitionFactory("flag_values"),
new ConfigFeatureFlagTransitionFactory("flag_values"))
.addEqualityGroup(new ConfigFeatureFlagTransitionFactory("other_flag_values"))
.testEquals();
}
}
| {
"content_hash": "8e9ac8cf1dee7dad4bd4f20a2754adb5",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 99,
"avg_line_length": 41.73412698412698,
"alnum_prop": 0.6773794808405439,
"repo_name": "snnn/bazel",
"id": "b6d52305ee4f373921b34e4b5a5242dfd12d6b9e",
"size": "10517",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/test/java/com/google/devtools/build/lib/rules/config/ConfigFeatureFlagTransitionFactoryTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "14332"
},
{
"name": "C++",
"bytes": "1010722"
},
{
"name": "HTML",
"bytes": "20974"
},
{
"name": "Java",
"bytes": "26190064"
},
{
"name": "JavaScript",
"bytes": "9186"
},
{
"name": "Makefile",
"bytes": "248"
},
{
"name": "PowerShell",
"bytes": "5473"
},
{
"name": "Python",
"bytes": "605385"
},
{
"name": "Roff",
"bytes": "511"
},
{
"name": "Shell",
"bytes": "964833"
}
],
"symlink_target": ""
} |
<!-- Footer JoorxCMS -->
<footer class="footer navbar-fixed-bottom">
<div class="container">
© <?php echo date("Y");?> <a href="<?=base_url()?>"><?=$this->config->item('joorxcms_title')?></a> All Rights Reserved. Created by <strong><a onclick="window.open('https://github.com/hexageek1337/JoorxCMS/');" title="JoorxCMS">JoorxCMS</a></strong> ♥
</div>
</footer>
<div class="footer-scroll">
<div class="button-scroll" onclick="scrolltotop()">
<a class="right-scroll"><i class="fa fa-angle-up" aria-hidden="true"></i></a>
</div>
</div>
<!-- Footer JoorxCMS -->
</body>
</html>
| {
"content_hash": "e7b8b367bc821dbffc265664a901caa5",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 290,
"avg_line_length": 43.857142857142854,
"alnum_prop": 0.6563517915309446,
"repo_name": "hexageek1337/JoorxCMS",
"id": "300e47c64087b6821f457c542e353f6f930fa478",
"size": "614",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "application/views/admin/footer.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "166224"
},
{
"name": "HTML",
"bytes": "7205"
},
{
"name": "JavaScript",
"bytes": "15625"
},
{
"name": "PHP",
"bytes": "1973622"
}
],
"symlink_target": ""
} |
// Returns the tangent (first derivative) of a quadratic bezier curve at parameter t.
// Assumes `curve` is an array of three control points, each an [x, y] pair.
const tangent = (t, curve) => {
  const [p0, p1, p2] = curve
  // B'(t) = 2(1-t)(P1-P0) + 2t(P2-P1)
  return [
    2 * (1 - t) * (p1[0] - p0[0]) + 2 * t * (p2[0] - p1[0]),
    2 * (1 - t) * (p1[1] - p0[1]) + 2 * t * (p2[1] - p1[1])
  ]
}
module.exports = tangent
| {
"content_hash": "a70781537a30836eaeeaa3e4b4e8f526",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 31,
"avg_line_length": 12.2,
"alnum_prop": 0.6229508196721312,
"repo_name": "jscad/csg.js",
"id": "a6c6727f115889a6816d1bce043e7cb36ffdef09",
"size": "61",
"binary": false,
"copies": "1",
"ref": "refs/heads/V2",
"path": "src/core/geometry/curves/bezier2/tangent.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1459041"
}
],
"symlink_target": ""
} |
TubeTK Slicer Modules
=====================
---
*This file is part of [TubeTK](http://www.tubetk.org). TubeTK is developed by [Kitware, Inc.](http://www.kitware.com) and licensed under the [Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0).*
| {
"content_hash": "e0baeee9d0d0804b12b66c0b3296e44c",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 216,
"avg_line_length": 53.2,
"alnum_prop": 0.6654135338345865,
"repo_name": "cdeepakroy/TubeTK",
"id": "1d4f7c2f747b2330c358c6a428fee2b77c6c09b2",
"size": "266",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "SlicerModules/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9149"
},
{
"name": "C++",
"bytes": "3920935"
},
{
"name": "CMake",
"bytes": "625504"
},
{
"name": "CSS",
"bytes": "17428"
},
{
"name": "Python",
"bytes": "209126"
},
{
"name": "Shell",
"bytes": "37371"
},
{
"name": "XSLT",
"bytes": "8636"
}
],
"symlink_target": ""
} |
import sys
import random
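# Reads all lines from stdin and prints them back in a random order (Python 2).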
lines = []
for line in sys.stdin:
lines.append(line)
#lines.sort()
length = len(lines)
for l in range(0,length):
choice = random.randint(0,len(lines)-1)
#print str(choice) + ' of ' + str(len(lines))
print lines.pop(choice),
| {
"content_hash": "a3135872517cbd846f208f1273684369",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 48,
"avg_line_length": 18.857142857142858,
"alnum_prop": 0.6590909090909091,
"repo_name": "timrdf/csv2rdf4lod-automation",
"id": "38bc1c66150621c019a345187666eb462d462299",
"size": "381",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/util/randomize-line-order.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "36758"
},
{
"name": "Batchfile",
"bytes": "5607"
},
{
"name": "C",
"bytes": "52334"
},
{
"name": "CSS",
"bytes": "5012"
},
{
"name": "HTML",
"bytes": "607634"
},
{
"name": "Java",
"bytes": "5909684"
},
{
"name": "Makefile",
"bytes": "433"
},
{
"name": "PHP",
"bytes": "26207"
},
{
"name": "Perl",
"bytes": "39517"
},
{
"name": "Python",
"bytes": "55028"
},
{
"name": "R",
"bytes": "455"
},
{
"name": "Shell",
"bytes": "1021416"
},
{
"name": "XSLT",
"bytes": "54468"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "a74a31e9e0ebb6df608027159eb66296",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "a68a25881596e3c11986c1309a1e5c6fc27f9a51",
"size": "217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Dothideomycetes/Pleosporales/Phaeosphaeriaceae/Phaeosphaeria/Phaeosphaeria pleurospora/ Syn. Sulcispora pleurospora/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
/*! fancyBox v2.1.5 fancyapps.com | fancyapps.com/fancybox/#license */
.fancybox-wrap,
.fancybox-skin,
.fancybox-outer,
.fancybox-inner,
.fancybox-image,
.fancybox-wrap iframe,
.fancybox-wrap object,
.fancybox-nav,
.fancybox-nav span,
.fancybox-tmp
{
padding: 0;
margin: 0;
border: 0;
outline: none;
vertical-align: top;
}
.fancybox-wrap {
position: absolute;
top: 0;
left: 0;
z-index: 8020;
}
.fancybox-skin {
position: relative;
background: #f9f9f9;
color: #444;
text-shadow: none;
}
.fancybox-opened {
z-index: 8030;
}
.fancybox-opened .fancybox-skin {
-webkit-box-shadow: 0 2px 10px rgba(0, 0, 0, 0.15);
-moz-box-shadow: 0 2px 10px rgba(0, 0, 0, 0.15);
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.15);
}
.fancybox-outer, .fancybox-inner {
position: relative;
}
.fancybox-inner {
overflow: hidden;
}
.fancybox-type-iframe .fancybox-inner {
-webkit-overflow-scrolling: touch;
}
.fancybox-error {
color: #444;
font: 14px/20px "Helvetica Neue",Helvetica,Arial,sans-serif;
margin: 0;
padding: 15px;
white-space: nowrap;
}
.fancybox-image, .fancybox-iframe {
display: block;
width: 100%;
height: 100%;
}
.fancybox-image {
max-width: 100%;
max-height: 100%;
}
#fancybox-loading, .fancybox-close, .fancybox-prev span, .fancybox-next span {
background-image: url('fancybox_sprite.png');
}
#fancybox-loading {
position: fixed;
top: 50%;
left: 50%;
margin-top: -22px;
margin-left: -22px;
background-position: 0 -108px;
opacity: 0.8;
cursor: pointer;
z-index: 8060;
}
#fancybox-loading div {
width: 44px;
height: 44px;
background: url('fancybox_loading.gif') center center no-repeat;
}
.fancybox-close {
position: absolute;
top: -18px;
right: -18px;
width: 36px;
height: 36px;
cursor: pointer;
z-index: 8040;
}
.fancybox-nav {
position: absolute;
top: 0;
width: 40%;
height: 100%;
cursor: pointer;
text-decoration: none;
background: transparent url('blank.gif'); /* helps IE */
-webkit-tap-highlight-color: rgba(0,0,0,0);
z-index: 8040;
}
.fancybox-prev {
left: 0;
}
.fancybox-next {
right: 0;
}
.fancybox-nav span {
position: absolute;
top: 50%;
width: 36px;
height: 34px;
margin-top: -18px;
cursor: pointer;
z-index: 8040;
visibility: hidden;
}
.fancybox-prev span {
left: 10px;
background-position: 0 -36px;
}
.fancybox-next span {
right: 10px;
background-position: 0 -72px;
}
.fancybox-nav:hover span {
visibility: visible;
}
.fancybox-tmp {
position: absolute;
top: -99999px;
left: -99999px;
visibility: hidden;
max-width: 99999px;
max-height: 99999px;
overflow: visible !important;
}
/* Overlay helper */
.fancybox-lock {
overflow: hidden !important;
width: auto;
}
.fancybox-lock body {
overflow: hidden !important;
}
.fancybox-lock-test {
overflow-y: hidden !important;
}
.fancybox-overlay {
position: absolute;
top: 0;
left: 0;
overflow: hidden;
display: none;
z-index: 8010;
background: rgba(255,255,255,0.5);
}
.fancybox-overlay-fixed {
position: fixed;
bottom: 0;
right: 0;
}
.fancybox-lock .fancybox-overlay {
overflow: auto;
overflow-y: auto;
}
/* Title helper */
.fancybox-title {
visibility: hidden;
font: normal 13px/20px "Helvetica Neue",Helvetica,Arial,sans-serif;
position: relative;
text-shadow: none;
z-index: 8050;
}
.fancybox-opened .fancybox-title {
visibility: visible;
}
.fancybox-title-float-wrap {
position: absolute;
bottom: 0;
right: 50%;
margin-bottom: -35px;
z-index: 8050;
text-align: center;
}
.fancybox-title-float-wrap .child {
display: inline-block;
margin-right: -100%;
padding: 2px 20px;
background: transparent; /* Fallback for web browsers that don't support RGBa */
background: rgba(0, 0, 0, 0.8);
-webkit-border-radius: 15px;
-moz-border-radius: 15px;
border-radius: 15px;
text-shadow: 0 1px 2px #222;
color: #FFF;
font-weight: bold;
line-height: 24px;
white-space: nowrap;
}
.fancybox-title-outside-wrap {
position: relative;
margin-top: 10px;
color: #fff;
}
.fancybox-title-inside-wrap {
padding-top: 10px;
}
.fancybox-title-over-wrap {
position: absolute;
bottom: 0;
left: 0;
color: #fff;
padding: 10px;
background: #000;
background: rgba(0, 0, 0, .8);
}
/*Retina graphics!*/
@media only screen and (-webkit-min-device-pixel-ratio: 1.5),
only screen and (min--moz-device-pixel-ratio: 1.5),
only screen and (min-device-pixel-ratio: 1.5){
#fancybox-loading, .fancybox-close, .fancybox-prev span, .fancybox-next span {
background-image: url('[email protected]');
background-size: 44px 152px; /*The size of the normal image, half the size of the hi-res image*/
}
#fancybox-loading div {
background-image: url('[email protected]');
background-size: 24px 24px; /*The size of the normal image, half the size of the hi-res image*/
}
} | {
"content_hash": "27b3af3e6f632d894a6a574658af87f1",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 98,
"avg_line_length": 17.71217712177122,
"alnum_prop": 0.6808333333333333,
"repo_name": "chenyongze/iwshop",
"id": "b2b85fa3ee6fad24575986141685dd55c477ec2c",
"size": "4800",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "static/script/fancyBox/source/jquery.fancybox.css",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "52"
},
{
"name": "CSS",
"bytes": "665725"
},
{
"name": "HTML",
"bytes": "2273052"
},
{
"name": "Java",
"bytes": "95797"
},
{
"name": "JavaScript",
"bytes": "11898000"
},
{
"name": "PHP",
"bytes": "4801026"
},
{
"name": "Shell",
"bytes": "147"
},
{
"name": "Smarty",
"bytes": "353193"
}
],
"symlink_target": ""
} |
package org.wso2.carbon.apimgt.rest.api.admin.dto;
import org.wso2.carbon.apimgt.rest.api.admin.dto.ThrottleConditionDTO;
import io.swagger.annotations.*;
import com.fasterxml.jackson.annotation.*;
import javax.validation.constraints.NotNull;
@ApiModel(description = "")
public class DateConditionDTO extends ThrottleConditionDTO {
private String specificDate = null;
/**
**/
@ApiModelProperty(value = "")
@JsonProperty("specificDate")
public String getSpecificDate() {
return specificDate;
}
public void setSpecificDate(String specificDate) {
this.specificDate = specificDate;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class DateConditionDTO {\n");
sb.append(" " + super.toString()).append("\n");
sb.append(" specificDate: ").append(specificDate).append("\n");
sb.append("}\n");
return sb.toString();
}
}
| {
"content_hash": "ae991246e9dc17cc60b12a8ecdfabe9f",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 70,
"avg_line_length": 21.363636363636363,
"alnum_prop": 0.6925531914893617,
"repo_name": "dhanuka84/carbon-apimgt",
"id": "a061af97447c416c300cd08834d8113ccf447808",
"size": "940",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "components/apimgt/org.wso2.carbon.apimgt.rest.api.admin/src/gen/java/org/wso2/carbon/apimgt/rest/api/admin/dto/DateConditionDTO.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "48278"
},
{
"name": "Batchfile",
"bytes": "7251"
},
{
"name": "CSS",
"bytes": "2469890"
},
{
"name": "HTML",
"bytes": "354916"
},
{
"name": "Java",
"bytes": "4692629"
},
{
"name": "JavaScript",
"bytes": "16952271"
},
{
"name": "PLSQL",
"bytes": "127903"
},
{
"name": "PLpgSQL",
"bytes": "34434"
},
{
"name": "Shell",
"bytes": "21071"
},
{
"name": "Thrift",
"bytes": "1730"
},
{
"name": "XSLT",
"bytes": "142140"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.Management;
using System.Globalization;
using SimpleCrm.DynamicSql;
namespace SQLiteTools.Utils
{
public partial class DynamicSqlTestForm : Form
{
public DynamicSqlTestForm()
{
InitializeComponent();
}
private void btnGetMachineCode_Click(object sender, EventArgs e)
{
var sqltuple = SqlParser.Eval(txtDynamicSql.Text, new Customer() { Name = this.txtName.Text, Address = this.txtAddress.Text });
txtSql.Text = sqltuple.Item1;
}
}
public class Customer
{
public long Id { get; set; }
public String Name { get; set; }
public String Address { get; set; }
public Boolean HasNameOrAddress
{
get
{
return !String.IsNullOrWhiteSpace(Name) || !String.IsNullOrWhiteSpace(Address);
}
}
}
}
| {
"content_hash": "69490b32abf689bc99bc4cea237337b4",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 139,
"avg_line_length": 25.372093023255815,
"alnum_prop": 0.6260311640696609,
"repo_name": "yyitsz/myjavastudio",
"id": "68a6ff8bca01532705e844d6da90ba75843baf7b",
"size": "1093",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SimpleCrm/SQLiteTools/Utils/DynamicSqlTestForm.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "29361"
},
{
"name": "Batchfile",
"bytes": "5917"
},
{
"name": "C#",
"bytes": "1904272"
},
{
"name": "CSS",
"bytes": "206344"
},
{
"name": "HTML",
"bytes": "53455"
},
{
"name": "Java",
"bytes": "3185115"
},
{
"name": "JavaScript",
"bytes": "654297"
},
{
"name": "PLSQL",
"bytes": "10155"
},
{
"name": "Roff",
"bytes": "178"
},
{
"name": "Shell",
"bytes": "418"
},
{
"name": "XSLT",
"bytes": "12347"
}
],
"symlink_target": ""
} |
package bitbucket
import (
"fmt"
"net/http"
"net/url"
"regexp"
"time"
"github.com/armab/drone/shared/httputil"
"github.com/armab/drone/shared/model"
"github.com/drone/go-bitbucket/bitbucket"
"github.com/drone/go-bitbucket/oauth1"
)
const (
DefaultAPI = "https://api.bitbucket.org/1.0"
DefaultURL = "https://bitbucket.org"
)
// parses an email address from string format
// `John Doe <[email protected]>`
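// e.g. emailRegexp.FindStringSubmatch("John Doe <john@doe.com>")[1] == "john@doe.com"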
var emailRegexp = regexp.MustCompile("<(.*)>")
type Bitbucket struct {
URL string
API string
Client string
Secret string
Open bool
}
func New(url, api, client, secret string, open bool) *Bitbucket {
return &Bitbucket{
URL: url,
API: api,
Client: client,
Secret: secret,
Open: open,
}
}
func NewDefault(client, secret string, open bool) *Bitbucket {
return New(DefaultURL, DefaultAPI, client, secret, open)
}
// Authorize handles Bitbucket API Authorization
func (r *Bitbucket) Authorize(res http.ResponseWriter, req *http.Request) (*model.Login, error) {
consumer := oauth1.Consumer{
RequestTokenURL: "https://bitbucket.org/api/1.0/oauth/request_token/",
AuthorizationURL: "https://bitbucket.org/!api/1.0/oauth/authenticate",
AccessTokenURL: "https://bitbucket.org/api/1.0/oauth/access_token/",
CallbackURL: httputil.GetScheme(req) + "://" + httputil.GetHost(req) + "/api/auth/bitbucket.org",
ConsumerKey: r.Client,
ConsumerSecret: r.Secret,
}
// get the oauth verifier
verifier := req.FormValue("oauth_verifier")
if len(verifier) == 0 {
// Generate a Request Token
requestToken, err := consumer.RequestToken()
if err != nil {
return nil, err
}
// add the request token as a signed cookie
httputil.SetCookie(res, req, "bitbucket_token", requestToken.Encode())
url, _ := consumer.AuthorizeRedirect(requestToken)
http.Redirect(res, req, url, http.StatusSeeOther)
return nil, nil
}
// remove bitbucket token data once before redirecting
// back to the application.
defer httputil.DelCookie(res, req, "bitbucket_token")
// get the tokens from the request
requestTokenStr := httputil.GetCookie(req, "bitbucket_token")
requestToken, err := oauth1.ParseRequestTokenStr(requestTokenStr)
if err != nil {
return nil, err
}
// exchange for an access token
accessToken, err := consumer.AuthorizeToken(requestToken, verifier)
if err != nil {
return nil, err
}
// create the Bitbucket client
client := bitbucket.New(
r.Client,
r.Secret,
accessToken.Token(),
accessToken.Secret(),
)
// get the currently authenticated Bitbucket User
user, err := client.Users.Current()
if err != nil {
return nil, err
}
// put the user data in the common format
login := model.Login{
Login: user.User.Username,
Access: accessToken.Token(),
Secret: accessToken.Secret(),
Name: user.User.DisplayName,
}
email, _ := client.Emails.FindPrimary(user.User.Username)
if email != nil {
login.Email = email.Email
}
return &login, nil
}
// GetKind returns the internal identifier of this remote Bitbucket instance.
func (r *Bitbucket) GetKind() string {
return model.RemoteBitbucket
}
// GetHost returns the hostname of this remote Bitbucket instance.
func (r *Bitbucket) GetHost() string {
uri, _ := url.Parse(r.URL)
return uri.Host
}
// GetRepos fetches all repositories that the specified
// user has access to in the remote system.
func (r *Bitbucket) GetRepos(user *model.User) ([]*model.Repo, error) {
var repos []*model.Repo
var client = bitbucket.New(
r.Client,
r.Secret,
user.Access,
user.Secret,
)
var list, err = client.Repos.List()
if err != nil {
return nil, err
}
var remote = r.GetKind()
var hostname = r.GetHost()
for _, item := range list {
// for now we only support git repos
if item.Scm != "git" {
continue
}
// these are the urls required to clone the repository
// TODO use the bitbucketurl.Host and bitbucketurl.Scheme instead of hardcoding
// so that we can support Stash.
var html = fmt.Sprintf("https://bitbucket.org/%s/%s", item.Owner, item.Slug)
var clone = fmt.Sprintf("https://bitbucket.org/%s/%s.git", item.Owner, item.Slug)
var ssh = fmt.Sprintf("[email protected]:%s/%s.git", item.Owner, item.Slug)
var repo = model.Repo{
UserID: user.ID,
Remote: remote,
Host: hostname,
Owner: item.Owner,
Name: item.Slug,
Private: item.Private,
URL: html,
CloneURL: clone,
GitURL: clone,
SSHURL: ssh,
Role: &model.Perm{
Admin: true,
Write: true,
Read: true,
},
}
if repo.Private {
repo.CloneURL = repo.SSHURL
}
repos = append(repos, &repo)
}
return repos, err
}
// GetScript fetches the build script (.drone.yml) from the remote
// repository and returns it in string format.
func (r *Bitbucket) GetScript(user *model.User, repo *model.Repo, hook *model.Hook) ([]byte, error) {
var client = bitbucket.New(
r.Client,
r.Secret,
user.Access,
user.Secret,
)
// get the yaml from the remote repository
var raw, err = client.Sources.Find(repo.Owner, repo.Name, hook.Sha, ".drone.yml")
if err != nil {
return nil, err
}
return []byte(raw.Data), nil
}
// Activate activates a repository by adding a Post-commit hook and
// a Public Deploy key, if applicable.
func (r *Bitbucket) Activate(user *model.User, repo *model.Repo, link string) error {
var client = bitbucket.New(
r.Client,
r.Secret,
user.Access,
user.Secret,
)
// parse the hostname from the hook, and use this
// to name the ssh key
var hookurl, err = url.Parse(link)
if err != nil {
return err
}
// if the repository is private we'll need
// to upload a github key to the repository
if repo.Private {
// name the key
var keyname = "drone@" + hookurl.Host
var _, err = client.RepoKeys.CreateUpdate(repo.Owner, repo.Name, repo.PublicKey, keyname)
if err != nil {
return err
}
}
// add the hook
_, err = client.Brokers.CreateUpdate(repo.Owner, repo.Name, link, bitbucket.BrokerTypePost)
return err
}
// Deactivate removes a repository by removing all the post-commit hooks
// which are equal to link and removing the SSH deploy key.
func (r *Bitbucket) Deactivate(user *model.User, repo *model.Repo, link string) error {
var client = bitbucket.New(
r.Client,
r.Secret,
user.Access,
user.Secret,
)
title, err := GetKeyTitle(link)
if err != nil {
return err
}
if err := client.RepoKeys.DeleteName(repo.Owner, repo.Name, title); err != nil {
return err
}
return client.Brokers.DeleteUrl(repo.Owner, repo.Name, link, bitbucket.BrokerTypePost)
}
// ParseHook parses the post-commit hook from the Request body
// and returns the required data in a standard format.
func (r *Bitbucket) ParseHook(req *http.Request) (*model.Hook, error) {
var payload = req.FormValue("payload")
var hook, err = bitbucket.ParseHook([]byte(payload))
if err != nil {
return nil, err
}
// verify the payload has the minimum amount of required data.
if hook.Repo == nil || hook.Commits == nil || len(hook.Commits) == 0 {
return nil, fmt.Errorf("Invalid Bitbucket post-commit Hook. Missing Repo or Commit data.")
}
var author = hook.Commits[len(hook.Commits)-1].RawAuthor
var matches = emailRegexp.FindStringSubmatch(author)
if len(matches) == 2 {
author = matches[1]
}
return &model.Hook{
Owner: hook.Repo.Owner,
Repo: hook.Repo.Slug,
Sha: hook.Commits[len(hook.Commits)-1].Hash,
Branch: hook.Commits[len(hook.Commits)-1].Branch,
Author: author,
Timestamp: time.Now().UTC().String(),
Message: hook.Commits[len(hook.Commits)-1].Message,
}, nil
}
func (r *Bitbucket) OpenRegistration() bool {
return r.Open
}
func (r *Bitbucket) GetToken(user *model.User) (*model.Token, error) {
return nil, nil
}
// GetKeyTitle is a helper function that generates a title for the
// RSA public key based on the hostname of the hook link.
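// For example, a link of "http://drone.example.com/hook" yields "drone@drone.example.com".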
func GetKeyTitle(rawurl string) (string, error) {
var uri, err = url.Parse(rawurl)
if err != nil {
return "", err
}
return fmt.Sprintf("drone@%s", uri.Host), nil
}
| {
"content_hash": "1ac7da6a8be38d2555f0d8b4b7ea910a",
"timestamp": "",
"source": "github",
"line_count": 310,
"max_line_length": 104,
"avg_line_length": 25.951612903225808,
"alnum_prop": 0.6876320696084525,
"repo_name": "armab/drone",
"id": "85ebfe4d29993e43c49a2ac2018e57981f09ad42",
"size": "8045",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugin/remote/bitbucket/bitbucket.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "44707"
},
{
"name": "Go",
"bytes": "495052"
},
{
"name": "HTML",
"bytes": "27967"
},
{
"name": "JavaScript",
"bytes": "42277"
},
{
"name": "Makefile",
"bytes": "3666"
},
{
"name": "Shell",
"bytes": "6694"
}
],
"symlink_target": ""
} |
from toontown.safezone import DGPlayground
from toontown.safezone import SafeZoneLoader
class DGSafeZoneLoader(SafeZoneLoader.SafeZoneLoader):
def __init__(self, hood, parentFSM, doneEvent):
SafeZoneLoader.SafeZoneLoader.__init__(self, hood, parentFSM, doneEvent)
self.playgroundClass = DGPlayground.DGPlayground
self.musicFile = 'phase_8/audio/bgm/DG_nbrhood.ogg'
self.activityMusicFile = 'phase_8/audio/bgm/DG_SZ.ogg'
self.dnaFile = 'phase_8/dna/daisys_garden_sz.pdna'
self.safeZoneStorageDNAFile = 'phase_8/dna/storage_DG_sz.pdna'
def load(self):
SafeZoneLoader.SafeZoneLoader.load(self)
self.birdSound = map(base.loadSfx, ['phase_8/audio/sfx/SZ_DG_bird_01.ogg',
'phase_8/audio/sfx/SZ_DG_bird_02.ogg',
'phase_8/audio/sfx/SZ_DG_bird_03.ogg',
'phase_8/audio/sfx/SZ_DG_bird_04.ogg'])
def unload(self):
SafeZoneLoader.SafeZoneLoader.unload(self)
del self.birdSound | {
"content_hash": "958645060ac3f100f3e8761de146d2a8",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 83,
"avg_line_length": 47.43478260869565,
"alnum_prop": 0.6223648029330889,
"repo_name": "Spiderlover/Toontown",
"id": "7b5870a79d8b4254aeb1b7afb006e233576b8ef9",
"size": "1091",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "toontown/safezone/DGSafeZoneLoader.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7774"
},
{
"name": "Python",
"bytes": "17241353"
},
{
"name": "Shell",
"bytes": "7699"
}
],
"symlink_target": ""
} |
using InfoBridge.SuperLinq.Core.Attributes;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace InfoBridge.SuperLinq.Tests.Unit.Helpers
{
[TableInfo("inheritingtestperson")]
public class InheritingTestPerson : TestPerson
{
[ColumnInfo("newid")]
public override int Id { get; set; }
public string ExtraProperty { get; set; }
}
}
| {
"content_hash": "c7cd257ec64a7da39b75142d53af66c5",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 50,
"avg_line_length": 24.72222222222222,
"alnum_prop": 0.7146067415730337,
"repo_name": "mawax/InfoBridge.SuperLinq",
"id": "36cbd655e69058633e46f358e3138eec70288365",
"size": "447",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/InfoBridge.SuperLinq.Tests.Unit/Helpers/InheritingTestPerson.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "2155971"
}
],
"symlink_target": ""
} |
using System.Web;
using System.Web.Http;
namespace ResourceServer__Web_API_v1_.Areas.HelpPage
{
/// <summary>
/// Use this class to customize the Help Page.
/// For example you can set a custom <see cref="System.Web.Http.Description.IDocumentationProvider"/> to supply the documentation
/// or you can provide the samples for the requests/responses.
/// </summary>
public static class HelpPageConfig
{
public static void Register(HttpConfiguration config)
{
//// The following line uses the documentation from the XML documentation file.
config.SetDocumentationProvider(new XmlDocumentationProvider(HttpContext.Current.Server.MapPath("~/App_Data/ResourceServer.xml")));
//// Uncomment the following to use "sample string" as the sample for all actions that have string as the body parameter or return type.
//// Also, the string arrays will be used for IEnumerable<string>. The sample objects will be serialized into different media type
//// formats by the available formatters.
//config.SetSampleObjects(new Dictionary<Type, object>
//{
// {typeof(string), "sample string"},
// {typeof(IEnumerable<string>), new string[]{"sample 1", "sample 2"}}
//});
//// Uncomment the following to use "[0]=foo&[1]=bar" directly as the sample for all actions that support form URL encoded format
//// and have IEnumerable<string> as the body parameter or return type.
//config.SetSampleForType("[0]=foo&[1]=bar", new MediaTypeHeaderValue("application/x-www-form-urlencoded"), typeof(IEnumerable<string>));
//// Uncomment the following to use "1234" directly as the request sample for media type "text/plain" on the controller named "Values"
//// and action named "Put".
//config.SetSampleRequest("1234", new MediaTypeHeaderValue("text/plain"), "Values", "Put");
//// Uncomment the following to use the image on "../images/aspNetHome.png" directly as the response sample for media type "image/png"
//// on the controller named "Values" and action named "Get" with parameter "id".
//config.SetSampleResponse(new ImageSample("../images/aspNetHome.png"), new MediaTypeHeaderValue("image/png"), "Values", "Get", "id");
//// Uncomment the following to correct the sample request when the action expects an HttpRequestMessage with ObjectContent<string>.
//// The sample will be generated as if the controller named "Values" and action named "Get" were having string as the body parameter.
//config.SetActualRequestType(typeof(string), "Values", "Get");
//// Uncomment the following to correct the sample response when the action returns an HttpResponseMessage with ObjectContent<string>.
//// The sample will be generated as if the controller named "Values" and action named "Post" were returning a string.
//config.SetActualResponseType(typeof(string), "Values", "Post");
}
}
} | {
"content_hash": "07f140606b6ba7a65f90931aa5c5f59f",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 149,
"avg_line_length": 65.25,
"alnum_prop": 0.669220945083014,
"repo_name": "IdentityModel/AuthorizationServer",
"id": "952ea9a73ee9c824ce016a90a18b9afe91b722bd",
"size": "3132",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "samples/Flows/ResourceServer (Web API v1)/Areas/HelpPage/App_Start/HelpPageConfig.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "131"
},
{
"name": "C#",
"bytes": "324416"
},
{
"name": "CSS",
"bytes": "140361"
},
{
"name": "JavaScript",
"bytes": "65620"
}
],
"symlink_target": ""
} |
using System;
using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Core.ContentPages")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Core.ContentPages")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2011")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
[assembly: CLSCompliant(true)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("fde6e388-fe03-48cc-9b05-61bef749fb59")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"content_hash": "644e87c0cd7e2a87ebe4e78b9fc9187c",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 84,
"avg_line_length": 37.68421052631579,
"alnum_prop": 0.7458100558659218,
"repo_name": "coreframework/Core-Framework",
"id": "b2bac32d92b17c9c391a1e6eb00acd00c50d2fb2",
"size": "1435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/Core.WebContent/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ASP",
"bytes": "334271"
},
{
"name": "C#",
"bytes": "2629177"
},
{
"name": "ColdFusion",
"bytes": "123445"
},
{
"name": "JavaScript",
"bytes": "3485701"
},
{
"name": "PHP",
"bytes": "40147"
},
{
"name": "Perl",
"bytes": "33827"
},
{
"name": "Python",
"bytes": "38049"
}
],
"symlink_target": ""
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<artifactId>keycloak-parent</artifactId>
<groupId>org.keycloak</groupId>
<version>1.4.0.Final-SNAPSHOT</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<name>Keycloak Wildfly 9 Adapter</name>
<description/>
<modelVersion>4.0.0</modelVersion>
<artifactId>keycloak-wf9-adapter-dist-pom</artifactId>
<packaging>pom</packaging>
<modules>
<module>wf9-modules</module>
<module>wf9-adapter-zip</module>
</modules>
</project>
| {
"content_hash": "d640700c7fde4441b3982f2e5caeedee",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 105,
"avg_line_length": 37.05,
"alnum_prop": 0.650472334682861,
"repo_name": "cmoulliard/keycloak",
"id": "275c4e38e725356de6f5e8e1ff5995a0446f9428",
"size": "741",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "distribution/adapters/wf9-adapter/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "22819"
},
{
"name": "CSS",
"bytes": "549903"
},
{
"name": "HTML",
"bytes": "353305"
},
{
"name": "Java",
"bytes": "7452851"
},
{
"name": "JavaScript",
"bytes": "1281327"
},
{
"name": "Shell",
"bytes": "10703"
},
{
"name": "XSLT",
"bytes": "6012"
}
],
"symlink_target": ""
} |
// Copyright 2014 The Rector & Visitors of the University of Virginia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using HealthKit;
using Foundation;
using System;
using System.Threading;
using System.Collections.Generic;
using Sensus.Probes.User.Health;
using Syncfusion.SfChart.XForms;
using System.Threading.Tasks;
namespace Sensus.iOS.Probes.User.Health
{
public class iOSHealthKitWheelChairUseProbe : iOSHealthKitProbe
{
public sealed override string DisplayName
{
get
{
return "HealthKit Wheelchair Use";
}
}
public override Type DatumType
{
get
{
return typeof(WheelChairUseDatum);
}
}
public override int DefaultPollingSleepDurationMS
{
get
{
return int.MaxValue;
}
}
public iOSHealthKitWheelChairUseProbe()
: base(HKCharacteristicType.Create(HKCharacteristicTypeIdentifier.WheelchairUse))
{
}
protected override Task<List<Datum>> PollAsync(CancellationToken cancellationToken)
{
List<Datum> data = new List<Datum>();
NSError error;
HKWheelchairUseObject wheelChair = HealthStore.GetWheelchairUse(out error);
if (error == null)
{
if (wheelChair.WheelchairUse == HKWheelchairUse.NotSet)
{
data.Add(new WheelChairUseDatum(DateTimeOffset.Now, WheelChairUse.NotSet));
}
else if (wheelChair.WheelchairUse == HKWheelchairUse.No)
{
data.Add(new WheelChairUseDatum(DateTimeOffset.Now, WheelChairUse.No));
}
else if (wheelChair.WheelchairUse == HKWheelchairUse.Yes)
{
data.Add(new WheelChairUseDatum(DateTimeOffset.Now, WheelChairUse.Yes));
}
else
{
throw new Exception("User has not provided -- or has not allowed access to -- their wheel chair use status.");
}
}
else
{
throw new Exception("Error reading wheel chair use status: " + error.Description);
}
return Task.FromResult(data);
}
protected override ChartSeries GetChartSeries()
{
throw new NotImplementedException();
}
protected override ChartDataPoint GetChartDataPointFromDatum(Datum datum)
{
throw new NotImplementedException();
}
protected override ChartAxis GetChartPrimaryAxis()
{
throw new NotImplementedException();
}
protected override RangeAxisBase GetChartSecondaryAxis()
{
throw new NotImplementedException();
}
}
} | {
"content_hash": "956819ca46589ed53192d250e6f7f007",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 130,
"avg_line_length": 31.216216216216218,
"alnum_prop": 0.5924963924963925,
"repo_name": "predictive-technology-laboratory/sensus",
"id": "da339d920ddb12894c396227553d32ca84527507",
"size": "3467",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Sensus.iOS.Shared/Probes/User/Health/iOSHealthKitWheelchairUseProbe.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "2340482"
},
{
"name": "HTML",
"bytes": "109896"
},
{
"name": "JavaScript",
"bytes": "1068"
},
{
"name": "Python",
"bytes": "13651"
},
{
"name": "R",
"bytes": "30597"
},
{
"name": "Shell",
"bytes": "42994"
}
],
"symlink_target": ""
} |
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**CompletedAt** | [**time.Time**](time.Time.md) | Time when task was completed. Always in UTC. | [optional] [default to null]
**Reason** | **string** | Machine-readable reason for failure, if status=error. Only used by the /error endpoint. | [optional] [default to null]
**Error_** | **string** | Error message, if status=error. Only used by the /error endpoint. | [optional] [default to null]
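
A minimal sketch of how these fields might map onto a Go struct (the package name, JSON tags, and comments are illustrative assumptions, not necessarily the generated client code):

```go
package titan

import "time"

// Complete mirrors the documented model: a completion timestamp plus
// optional failure details used by the /error endpoint.
type Complete struct {
	// CompletedAt is the time when the task was completed, always in UTC.
	CompletedAt time.Time `json:"completed_at,omitempty"`
	// Reason is a machine-readable reason for failure, if status=error.
	Reason string `json:"reason,omitempty"`
	// Error_ is the error message, if status=error.
	Error_ string `json:"error,omitempty"`
}
```
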
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
| {
"content_hash": "11058418ca83638fed2ce706de7ea173",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 161,
"avg_line_length": 73.33333333333333,
"alnum_prop": 0.6242424242424243,
"repo_name": "iron-io/titan_go",
"id": "5ab2875e9cf21efc225cffe8a1ccb65ce64ea16e",
"size": "686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/Complete.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "141803"
},
{
"name": "Shell",
"bytes": "1630"
}
],
"symlink_target": ""
} |
package org.apache.samza.rest.proxy.job;
import java.util.Set;
import org.apache.samza.SamzaException;
import org.apache.samza.config.ConfigFactory;
import org.apache.samza.rest.model.JobStatus;
import org.apache.samza.rest.model.yarn.YarnApplicationInfo;
import org.apache.samza.rest.proxy.installation.InstallationFinder;
import org.apache.samza.rest.proxy.installation.InstallationRecord;
import org.apache.samza.rest.proxy.installation.SimpleInstallationFinder;
import org.apache.samza.rest.resources.JobsResourceConfig;
import org.apache.samza.util.ReflectionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extends the {@link ScriptJobProxy} with methods specific to simple Samza deployments.
*/
public class SimpleYarnJobProxy extends ScriptJobProxy {
private static final Logger log = LoggerFactory.getLogger(SimpleYarnJobProxy.class);
private static final String START_SCRIPT_NAME = "run-job.sh";
private static final String STOP_SCRIPT_NAME = "kill-yarn-job-by-name.sh";
private static final String CONFIG_FACTORY_PARAM = "--config-factory=org.apache.samza.config.factories.PropertiesConfigFactory";
private static final String CONFIG_PATH_PARAM_FORMAT = "--config-path=file://%s";
private final JobStatusProvider statusProvider;
private final InstallationFinder installFinder;
public SimpleYarnJobProxy(JobsResourceConfig config) throws Exception {
super(config);
this.installFinder = new SimpleInstallationFinder(config.getInstallationsPath(),
ReflectionUtil.getObj(getClass().getClassLoader(), config.getJobConfigFactory(), ConfigFactory.class));
this.statusProvider = new YarnRestJobStatusProvider(config);
}
@Override
public void start(JobInstance jobInstance)
throws Exception {
JobStatus currentStatus = getJobSamzaStatus(jobInstance);
if (currentStatus.hasBeenStarted()) {
log.info("Job {} will not be started because it is currently {}.", jobInstance, currentStatus.toString());
return;
}
String scriptPath = getScriptPath(jobInstance, START_SCRIPT_NAME);
int resultCode = scriptRunner.runScript(scriptPath, CONFIG_FACTORY_PARAM,
generateConfigPathParameter(jobInstance));
if (resultCode != 0) {
throw new SamzaException("Failed to start job. Result code: " + resultCode);
}
}
@Override
public void stop(JobInstance jobInstance)
throws Exception {
JobStatus currentStatus = getJobSamzaStatus(jobInstance);
if (!currentStatus.hasBeenStarted()) {
log.info("Job {} will not be stopped because it is currently {}.", jobInstance, currentStatus.toString());
return;
}
String scriptPath = getScriptPath(jobInstance, STOP_SCRIPT_NAME);
int resultCode = scriptRunner.runScript(scriptPath, YarnApplicationInfo.getQualifiedJobName(jobInstance));
if (resultCode != 0) {
throw new SamzaException("Failed to stop job. Result code: " + resultCode);
}
}
/**
* Generates the command line argument which specifies the path to the config file for the job.
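* For example, the generated argument looks like --config-path=file:///path/to/job/config.properties (the path shown is illustrative).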
*
* @param jobInstance the instance of the job.
* @return the --config-path command line argument.
*/
private String generateConfigPathParameter(JobInstance jobInstance) {
InstallationRecord record = installFinder.getAllInstalledJobs().get(jobInstance);
return String.format(CONFIG_PATH_PARAM_FORMAT, record.getConfigFilePath());
}
/**
* @return the {@link JobStatusProvider} to use for retrieving job status.
*/
public JobStatusProvider getJobStatusProvider() {
return statusProvider;
}
@Override
protected Set<JobInstance> getAllJobInstances() {
return installFinder.getAllInstalledJobs().keySet();
}
@Override
protected InstallationFinder getInstallationFinder() {
return installFinder;
}
}
| {
"content_hash": "0d0cb2963e2d4fd9363973bd39eb8fe6",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 130,
"avg_line_length": 38.26,
"alnum_prop": 0.7532671197072661,
"repo_name": "Swrrt/Samza",
"id": "60d0ab5ace1b6f7e637dc32936c5f62fd3d0d94c",
"size": "4633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samza-rest/src/main/java/org/apache/samza/rest/proxy/job/SimpleYarnJobProxy.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package uefivars
| {
"content_hash": "040be2097212ee8cee56f84d5055aaf3",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 16,
"avg_line_length": 17,
"alnum_prop": 0.8823529411764706,
"repo_name": "u-root/u-root",
"id": "786e0e7310f0b2b53a227a92bbc467b9e6af6d59",
"size": "426",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "pkg/uefivars/doc.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "5740"
},
{
"name": "C",
"bytes": "598"
},
{
"name": "Dockerfile",
"bytes": "14361"
},
{
"name": "Go",
"bytes": "4450126"
},
{
"name": "Makefile",
"bytes": "185"
},
{
"name": "Shell",
"bytes": "4236"
}
],
"symlink_target": ""
} |
class Header < BinData::Record
endian :big
uint16 :version
end
class Netflow5PDU < BinData::Record
endian :big
uint16 :version
uint16 :flow_records
uint32 :uptime
uint32 :unix_sec
uint32 :unix_nsec
uint32 :flow_seq_num
uint8 :engine_type
uint8 :engine_id
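# 16-bit sampling field per the NetFlow v5 header: the top 2 bits hold the sampling mode, the low 14 bits the sampling interval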
bit2   :sampling_type
bit14 :sampling_interval
array :records, :initial_length => :flow_records do
uint32 :srcaddr
uint32 :dstaddr
uint32 :nexthop
uint16 :iface_in
uint16 :iface_out
uint32 :packets
uint32 :octets
uint32 :first_uptime
uint32 :last_uptime
uint16 :srcport
uint16 :dstport
uint8 :pad1
uint8 :tcpflags
uint8 :proto
uint8 :tos
uint16 :srcas
uint16 :dstas
uint8 :srcmask
uint8 :dstmask
uint16 :pad2
end
end
class TemplateFlowset < BinData::Record
endian :big
uint16 :flowset_id
uint16 :flowset_length
uint16 :flowset_template_id
uint16 :flowset_field_count
string :flowset, :read_length => :flowset_length
end
class Netflow9PDU < BinData::Record
endian :big
uint16 :version
uint16 :flow_records
uint32 :uptime
uint32 :unix_sec
uint32 :flow_seq_num
uint32 :source_id
template_flowset :template_flowset
uint16 :template_id
uint16 :data_flowset_length
#string :data, :read_length => lambda { data_flowset_length - 32 }
end
class DataFlowset < BinData::Record
endian :big
uint16 :template_id
uint16 :data_flowset_length
end
| {
"content_hash": "647b69190b7fc3bf00ef5a1e01b4e0f6",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 70,
"avg_line_length": 20.194444444444443,
"alnum_prop": 0.6932599724896836,
"repo_name": "seeingidog/netflow",
"id": "9f0839d4b2e22638c9f883b4e0e9a88c3acf7163",
"size": "1454",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/netflow/models/binary_models.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "6286"
}
],
"symlink_target": ""
} |
// DATA_TEMPLATE: js_data
oTest.fnStart("aoColumns.sClass");
$(document).ready(function () {
/* Check the default */
var oTable = $('#example').dataTable({
"aaData": gaaData
});
var oSettings = oTable.fnSettings();
oTest.fnTest(
"By default the test class hasn't been applied to the column (sanity!)",
null,
function () {
return $('#example tbody tr:eq(0) td:eq(2)').hasClass('unittest') == false;
}
);
oTest.fnTest(
"Add a class to a single column - first row",
function () {
oSession.fnRestore();
$('#example').dataTable({
"aaData": gaaData,
"aoColumns": [
null,
null,
{"sClass": 'unittest'},
null,
null
]
});
},
function () {
return $('#example tbody tr:eq(1) td:eq(2)').hasClass('unittest');
}
);
oTest.fnTest(
"Add a class to a single column - third row",
null,
function () {
return $('#example tbody tr:eq(3) td:eq(2)').hasClass('unittest');
}
);
oTest.fnTest(
"Add a class to a single column - last row",
null,
function () {
return $('#example tbody tr:eq(9) td:eq(2)').hasClass('unittest');
}
);
oTest.fnTest(
"Add a class to a single column - has not applied to other columns - 1st",
null,
function () {
return $('#example tbody tr:eq(3) td:eq(0)').hasClass('unittest') == false;
}
);
oTest.fnTest(
"Add a class to a single column - has not applied to other columns - 5th",
null,
function () {
return $('#example tbody tr:eq(3) td:eq(4)').hasClass('unittest') == false;
}
);
oTest.fnTest(
"Add a class to a single column - seventh row - second page",
function () {
$('#example_next').click();
},
function () {
return $('#example tbody tr:eq(6) td:eq(2)').hasClass('unittest');
}
);
oTest.fnTest(
"Add a class to a single column - has not applied to header",
null,
function () {
return $('#example thead tr:eq(3) th:eq(4)').hasClass('unittest') == false;
}
);
oTest.fnTest(
"Add a class to a single column - has not applied to footer",
null,
function () {
return $('#example tfoot tr:eq(3) th:eq(4)').hasClass('unittest') == false;
}
);
oTest.fnTest(
"Class defined for multiple columns - first row",
function () {
oSession.fnRestore();
$('#example').dataTable({
"aaData": gaaData,
"aoColumns": [
{"sClass": 'unittest2'},
null,
null,
{"sClass": 'unittest1'},
null
]
});
},
function () {
var bReturn =
$('#example tbody tr:eq(3) td:eq(0)').hasClass('unittest2') &&
$('#example tbody tr:eq(8) td:eq(3)').hasClass('unittest1');
return bReturn;
}
);
oTest.fnTest(
"Class defined for multiple columns - has not applied to other columns - 5th 1",
null,
function () {
return $('#example tbody tr:eq(0) td:eq(4)').hasClass('unittest1') == false;
}
);
oTest.fnTest(
"Class defined for multiple columns - has not applied to other columns - 5th 2",
null,
function () {
return $('#example tbody tr:eq(6) td:eq(4)').hasClass('unittest2') == false;
}
);
oTest.fnComplete();
}); | {
"content_hash": "e6c90f8fb44a0f9e656bd1c63f9ccfe9",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 88,
"avg_line_length": 27.964028776978417,
"alnum_prop": 0.4684846925649601,
"repo_name": "tanvirshuvo/abtec",
"id": "d8f0bc6b544b5422dfd13ecc29e13324f2ea810c",
"size": "3887",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "target/ABTeC/resources/assets/advanced-datatable/media/unit_testing/tests_onhold/2_js/aoColumns.sClass.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "34910"
},
{
"name": "ApacheConf",
"bytes": "2398"
},
{
"name": "CSS",
"bytes": "1905278"
},
{
"name": "CoffeeScript",
"bytes": "161324"
},
{
"name": "Go",
"bytes": "27576"
},
{
"name": "HTML",
"bytes": "10322250"
},
{
"name": "Java",
"bytes": "305494"
},
{
"name": "JavaScript",
"bytes": "14345880"
},
{
"name": "Makefile",
"bytes": "794"
},
{
"name": "PHP",
"bytes": "494038"
},
{
"name": "Python",
"bytes": "22034"
},
{
"name": "Shell",
"bytes": "14822"
}
],
"symlink_target": ""
} |
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" xmlns:junos="http://xml.juniper.net/junos/10.4R3/junos">
<pipe>
<more-no-more/>
</pipe>
<cli>
<ignore-signals>
hup
</ignore-signals>
</cli>
<output>
Verified junos-install-mx-x86-64-16.1-20160925.0 signed by PackageDevelopmentEc_2016
Verified manifest signed by PackageDevelopmentEc_2016
Checking PIC combinations
Verified fips-mode signed by PackageDevelopmentEc_2016
Verified jail-runtime signed by PackageDevelopmentEc_2016
Verified jdocs signed by PackageDevelopmentEc_2016
Verified jpfe-X960 signed by PackageDevelopmentEc_2016
Verified jpfe-common signed by PackageDevelopmentEc_2016
Verified jpfe-wrlinux signed by PackageDevelopmentEc_2016
Verified jsd signed by PackageDevelopmentEc_2016
Verified vrr-mx signed by PackageDevelopmentEc_2016
</output>
<package-result>
0
</package-result>
</rpc-reply>
| {
"content_hash": "4a6b589efa2bfd839cd7e651523fcdd2",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 115,
"avg_line_length": 40,
"alnum_prop": 0.6817307692307693,
"repo_name": "Juniper/py-junos-eznc",
"id": "aa3a9a0503dce22fe510a2f240025f1bbd59c79b",
"size": "1040",
"binary": false,
"copies": "21",
"ref": "refs/heads/master",
"path": "tests/unit/utils/rpc-reply/request-package-in-service-upgrade.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "856"
},
{
"name": "Pascal",
"bytes": "408"
},
{
"name": "Puppet",
"bytes": "2263"
},
{
"name": "Python",
"bytes": "1101958"
},
{
"name": "Ruby",
"bytes": "134"
},
{
"name": "Shell",
"bytes": "1516"
}
],
"symlink_target": ""
} |
require "abstract_unit"
require "active_support/testing/stream"
class Deprecatee
def initialize
@request = ActiveSupport::Deprecation::DeprecatedInstanceVariableProxy.new(self, :request)
@_request = "there we go"
end
def request; @_request end
def old_request; @request end
def partially(foo = nil)
ActiveSupport::Deprecation.warn("calling with foo=nil is out") if foo.nil?
end
def not() 2 end
def none() 1 end
def one(a) a end
def multi(a, b, c) [a, b, c] end
deprecate :none, :one, :multi
def a; end
def b; end
def c; end
def d; end
def e; end
deprecate :a, :b, c: :e, d: "you now need to do something extra for this one"
def f=(v); end
deprecate :f=
deprecate :g
def g; end
module B
C = 1
end
A = ActiveSupport::Deprecation::DeprecatedConstantProxy.new("Deprecatee::A", "Deprecatee::B::C")
module New
class Descendant; end
end
Old = ActiveSupport::Deprecation::DeprecatedConstantProxy.new("Deprecatee::Old", "Deprecatee::New")
end
class DeprecateeWithAccessor
include ActiveSupport::Deprecation::DeprecatedConstantAccessor
module B
C = 1
end
deprecate_constant "A", "DeprecateeWithAccessor::B::C"
class NewException < StandardError; end
deprecate_constant "OldException", "DeprecateeWithAccessor::NewException"
end
class DeprecationTest < ActiveSupport::TestCase
include ActiveSupport::Testing::Stream
def setup
# Track the last warning.
@old_behavior = ActiveSupport::Deprecation.behavior
@last_message = nil
ActiveSupport::Deprecation.behavior = Proc.new { |message| @last_message = message }
@dtc = Deprecatee.new
end
def teardown
ActiveSupport::Deprecation.behavior = @old_behavior
end
def test_inline_deprecation_warning
assert_deprecated(/foo=nil/) do
@dtc.partially
end
end
def test_undeprecated
assert_not_deprecated do
assert_equal 2, @dtc.not
end
end
def test_deprecate_class_method
assert_deprecated(/none is deprecated/) do
assert_equal 1, @dtc.none
end
assert_deprecated(/one is deprecated/) do
assert_equal 1, @dtc.one(1)
end
assert_deprecated(/multi is deprecated/) do
assert_equal [1, 2, 3], @dtc.multi(1, 2, 3)
end
end
def test_deprecate_object
deprecated_object = ActiveSupport::Deprecation::DeprecatedObjectProxy.new(Object.new, ":bomb:")
assert_deprecated(/:bomb:/) { deprecated_object.to_s }
end
def test_nil_behavior_is_ignored
ActiveSupport::Deprecation.behavior = nil
assert_deprecated(/foo=nil/) { @dtc.partially }
end
def test_several_behaviors
@a, @b, @c = nil, nil, nil
ActiveSupport::Deprecation.behavior = [
lambda { |msg, callstack, horizon, gem| @a = msg },
lambda { |msg, callstack| @b = msg },
lambda { |*args| @c = args },
]
@dtc.partially
assert_match(/foo=nil/, @a)
assert_match(/foo=nil/, @b)
assert_equal 4, @c.size
end
def test_raise_behaviour
ActiveSupport::Deprecation.behavior = :raise
message = "Revise this deprecated stuff now!"
callstack = caller_locations
e = assert_raise ActiveSupport::DeprecationException do
ActiveSupport::Deprecation.behavior.first.call(message, callstack, "horizon", "gem")
end
assert_equal message, e.message
assert_equal callstack.map(&:to_s), e.backtrace.map(&:to_s)
end
def test_default_stderr_behavior
ActiveSupport::Deprecation.behavior = :stderr
behavior = ActiveSupport::Deprecation.behavior.first
content = capture(:stderr) {
assert_nil behavior.call("Some error!", ["call stack!"], "horizon", "gem")
}
assert_match(/Some error!/, content)
assert_match(/call stack!/, content)
end
def test_default_stderr_behavior_with_warn_method
ActiveSupport::Deprecation.behavior = :stderr
content = capture(:stderr) {
ActiveSupport::Deprecation.warn("Instance error!", ["instance call stack!"])
}
assert_match(/Instance error!/, content)
assert_match(/instance call stack!/, content)
end
def test_default_silence_behavior
ActiveSupport::Deprecation.behavior = :silence
behavior = ActiveSupport::Deprecation.behavior.first
stderr_output = capture(:stderr) {
assert_nil behavior.call("Some error!", ["call stack!"], "horizon", "gem")
}
assert_empty stderr_output
end
def test_default_notify_behavior
ActiveSupport::Deprecation.behavior = :notify
behavior = ActiveSupport::Deprecation.behavior.first
begin
events = []
ActiveSupport::Notifications.subscribe("deprecation.my_gem_custom") { |*args|
events << args.extract_options!
}
assert_nil behavior.call("Some error!", ["call stack!"], "horizon", "MyGem::Custom")
assert_equal 1, events.size
assert_equal "Some error!", events.first[:message]
assert_equal ["call stack!"], events.first[:callstack]
assert_equal "horizon", events.first[:deprecation_horizon]
assert_equal "MyGem::Custom", events.first[:gem_name]
ensure
ActiveSupport::Notifications.unsubscribe("deprecation.my_gem_custom")
end
end
def test_default_invalid_behavior
e = assert_raises(ArgumentError) do
ActiveSupport::Deprecation.behavior = :invalid
end
assert_equal ":invalid is not a valid deprecation behavior.", e.message
end
def test_deprecated_instance_variable_proxy
assert_not_deprecated { @dtc.request.size }
assert_deprecated("@request.size") { assert_equal @dtc.request.size, @dtc.old_request.size }
assert_deprecated("@request.to_s") { assert_equal @dtc.request.to_s, @dtc.old_request.to_s }
end
def test_deprecated_instance_variable_proxy_shouldnt_warn_on_inspect
assert_not_deprecated { assert_equal @dtc.request.inspect, @dtc.old_request.inspect }
end
def test_deprecated_constant_proxy
assert_not_deprecated { Deprecatee::B::C }
assert_deprecated("Deprecatee::A") { assert_equal Deprecatee::B::C, Deprecatee::A }
assert_not_deprecated { assert_equal Deprecatee::B::C.class, Deprecatee::A.class }
end
def test_deprecated_constant_descendant
assert_not_deprecated { Deprecatee::New::Descendant }
assert_deprecated("Deprecatee::Old") do
assert_equal Deprecatee::Old::Descendant, Deprecatee::New::Descendant
end
assert_raises(NameError) do
assert_deprecated("Deprecatee::Old") { Deprecatee::Old::NON_EXISTENCE }
end
end
def test_deprecated_constant_accessor
assert_not_deprecated { DeprecateeWithAccessor::B::C }
assert_deprecated("DeprecateeWithAccessor::A") { assert_equal DeprecateeWithAccessor::B::C, DeprecateeWithAccessor::A }
end
def test_deprecated_constant_accessor_exception
raise DeprecateeWithAccessor::NewException.new("Test")
rescue DeprecateeWithAccessor::OldException => e
assert_kind_of DeprecateeWithAccessor::NewException, e
end
def test_assert_deprecated_raises_when_method_not_deprecated
assert_raises(Minitest::Assertion) { assert_deprecated { @dtc.not } }
end
def test_assert_not_deprecated
assert_raises(Minitest::Assertion) { assert_not_deprecated { @dtc.partially } }
end
def test_assert_deprecation_without_match
assert_deprecated do
@dtc.partially
end
end
def test_assert_deprecated_matches_any_warning
assert_deprecated "abc" do
ActiveSupport::Deprecation.warn "abc"
ActiveSupport::Deprecation.warn "def"
end
rescue Minitest::Assertion
flunk "assert_deprecated should match any warning in block, not just the last one"
end
def test_assert_not_deprecated_returns_result_of_block
assert_equal 123, assert_not_deprecated { 123 }
end
def test_assert_deprecated_returns_result_of_block
result = assert_deprecated("abc") do
ActiveSupport::Deprecation.warn "abc"
123
end
assert_equal 123, result
end
def test_assert_deprecated_warn_work_with_default_behavior
ActiveSupport::Deprecation.instance_variable_set("@behavior", nil)
assert_deprecated("abc") do
ActiveSupport::Deprecation.warn "abc"
end
end
def test_silence
ActiveSupport::Deprecation.silence do
assert_not_deprecated { @dtc.partially }
end
ActiveSupport::Deprecation.silenced = true
assert_not_deprecated { @dtc.partially }
ActiveSupport::Deprecation.silenced = false
end
def test_deprecation_without_explanation
assert_deprecated { @dtc.a }
assert_deprecated { @dtc.b }
assert_deprecated { @dtc.f = :foo }
end
def test_deprecation_with_alternate_method
assert_deprecated(/use e instead/) { @dtc.c }
end
def test_deprecation_with_explicit_message
assert_deprecated(/you now need to do something extra for this one/) { @dtc.d }
end
def test_deprecation_in_other_object
messages = []
klass = Class.new do
delegate :warn, :behavior=, to: ActiveSupport::Deprecation
end
o = klass.new
o.behavior = Proc.new { |message, callstack| messages << message }
assert_difference("messages.size") do
o.warn("warning")
end
end
def test_deprecated_method_with_custom_method_warning
deprecator = deprecator_with_messages
class << deprecator
private
def deprecated_method_warning(method, message)
"deprecator.deprecated_method_warning.#{method}"
end
end
deprecatee = Class.new do
def method
end
deprecate :method, deprecator: deprecator
end
deprecatee.new.method
assert deprecator.messages.first.match("DEPRECATION WARNING: deprecator.deprecated_method_warning.method")
end
def test_deprecate_with_custom_deprecator
custom_deprecator = Struct.new(:deprecation_warning).new
assert_called_with(custom_deprecator, :deprecation_warning, [:method, nil]) do
klass = Class.new do
def method
end
deprecate :method, deprecator: custom_deprecator
end
klass.new.method
end
end
def test_deprecated_constant_with_deprecator_given
deprecator = deprecator_with_messages
klass = Class.new
klass.const_set(:OLD, ActiveSupport::Deprecation::DeprecatedConstantProxy.new("klass::OLD", "Object", deprecator))
assert_difference("deprecator.messages.size") do
klass::OLD.to_s
end
end
def test_deprecated_constant_with_custom_message
deprecator = deprecator_with_messages
klass = Class.new
klass.const_set(:OLD, ActiveSupport::Deprecation::DeprecatedConstantProxy.new("klass::OLD", "Object", deprecator, message: "foo"))
klass::OLD.to_s
assert_match "foo", deprecator.messages.last
end
def test_deprecated_instance_variable_with_instance_deprecator
deprecator = deprecator_with_messages
klass = Class.new() do
def initialize(deprecator)
@request = ActiveSupport::Deprecation::DeprecatedInstanceVariableProxy.new(self, :request, :@request, deprecator)
@_request = :a_request
end
def request; @_request end
def old_request; @request end
end
assert_difference("deprecator.messages.size") { klass.new(deprecator).old_request.to_s }
end
def test_deprecated_instance_variable_with_given_deprecator
deprecator = deprecator_with_messages
klass = Class.new do
define_method(:initialize) do
@request = ActiveSupport::Deprecation::DeprecatedInstanceVariableProxy.new(self, :request, :@request, deprecator)
@_request = :a_request
end
def request; @_request end
def old_request; @request end
end
assert_difference("deprecator.messages.size") { klass.new.old_request.to_s }
end
def test_delegate_deprecator_instance
klass = Class.new do
attr_reader :last_message
delegate :warn, :behavior=, to: ActiveSupport::Deprecation
def initialize
self.behavior = [Proc.new { |message| @last_message = message }]
end
def deprecated_method
warn(deprecated_method_warning(:deprecated_method, "You are calling deprecated method"))
end
private
def deprecated_method_warning(method_name, message = nil)
message || "#{method_name} is deprecated and will be removed from This Library"
end
end
object = klass.new
object.deprecated_method
assert_match(/You are calling deprecated method/, object.last_message)
end
def test_default_deprecation_horizon_should_always_bigger_than_current_rails_version
assert_operator ActiveSupport::Deprecation.new.deprecation_horizon, :>, ActiveSupport::VERSION::STRING
end
def test_default_gem_name
deprecator = ActiveSupport::Deprecation.new
deprecator.send(:deprecated_method_warning, :deprecated_method, "You are calling deprecated method").tap do |message|
assert_match(/is deprecated and will be removed from Rails/, message)
end
end
def test_custom_gem_name
deprecator = ActiveSupport::Deprecation.new("2.0", "Custom")
deprecator.send(:deprecated_method_warning, :deprecated_method, "You are calling deprecated method").tap do |message|
assert_match(/is deprecated and will be removed from Custom/, message)
end
end
def test_deprecate_work_before_define_method
assert_deprecated { @dtc.g }
end
private
def deprecator_with_messages
klass = Class.new(ActiveSupport::Deprecation)
deprecator = klass.new
deprecator.behavior = Proc.new { |message, callstack| deprecator.messages << message }
def deprecator.messages
@messages ||= []
end
deprecator
end
end
| {
"content_hash": "0e6961c0a271bf9fc6ee133a527da8b3",
"timestamp": "",
"source": "github",
"line_count": 460,
"max_line_length": 134,
"avg_line_length": 29.51304347826087,
"alnum_prop": 0.6953447259870359,
"repo_name": "arunagw/rails",
"id": "9898a1d9a0fa1800ce5c3590c05681655ccc94b6",
"size": "13607",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "activesupport/test/deprecation_test.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "53461"
},
{
"name": "CoffeeScript",
"bytes": "24531"
},
{
"name": "HTML",
"bytes": "494925"
},
{
"name": "JavaScript",
"bytes": "185220"
},
{
"name": "Ruby",
"bytes": "13094216"
},
{
"name": "Shell",
"bytes": "4531"
},
{
"name": "Yacc",
"bytes": "983"
}
],
"symlink_target": ""
} |
#include "config.h"
#include "WebPluginContainerImpl.h"
#include "Chrome.h"
#include "ChromeClientImpl.h"
#include "PluginLayerChromium.h"
#include "WebClipboard.h"
#include "WebCursorInfo.h"
#include "WebDataSourceImpl.h"
#include "WebElement.h"
#include "WebInputEvent.h"
#include "WebInputEventConversion.h"
#include "WebKit.h"
#include "WebKitClient.h"
#include "WebPlugin.h"
#include "WebRect.h"
#include "WebString.h"
#include "WebURL.h"
#include "WebURLError.h"
#include "WebURLRequest.h"
#include "WebVector.h"
#include "WebViewImpl.h"
#include "WrappedResourceResponse.h"
#include "EventNames.h"
#include "FocusController.h"
#include "FormState.h"
#include "Frame.h"
#include "FrameLoadRequest.h"
#include "FrameView.h"
#include "GraphicsContext.h"
#include "HostWindow.h"
#include "HTMLFormElement.h"
#include "HTMLNames.h"
#include "HTMLPlugInElement.h"
#include "IFrameShimSupport.h"
#include "KeyboardCodes.h"
#include "KeyboardEvent.h"
#include "MouseEvent.h"
#include "Page.h"
#include "RenderBox.h"
#include "ScrollView.h"
#include "UserGestureIndicator.h"
#include "WheelEvent.h"
#if WEBKIT_USING_SKIA
#include "PlatformContextSkia.h"
#endif
using namespace WebCore;
namespace WebKit {
// Public methods --------------------------------------------------------------
void WebPluginContainerImpl::setFrameRect(const IntRect& frameRect)
{
Widget::setFrameRect(frameRect);
reportGeometry();
}
void WebPluginContainerImpl::paint(GraphicsContext* gc, const IntRect& damageRect)
{
if (gc->paintingDisabled())
return;
if (!parent())
return;
// Don't paint anything if the plugin doesn't intersect the damage rect.
if (!frameRect().intersects(damageRect))
return;
gc->save();
ASSERT(parent()->isFrameView());
ScrollView* view = parent();
// The plugin is positioned in window coordinates, so it needs to be painted
// in window coordinates.
IntPoint origin = view->windowToContents(IntPoint(0, 0));
gc->translate(static_cast<float>(origin.x()), static_cast<float>(origin.y()));
#if WEBKIT_USING_SKIA
WebCanvas* canvas = gc->platformContext()->canvas();
#elif WEBKIT_USING_CG
WebCanvas* canvas = gc->platformContext();
#endif
IntRect windowRect =
IntRect(view->contentsToWindow(damageRect.location()), damageRect.size());
m_webPlugin->paint(canvas, windowRect);
gc->restore();
}
void WebPluginContainerImpl::invalidateRect(const IntRect& rect)
{
if (!parent())
return;
RenderBox* renderer = toRenderBox(m_element->renderer());
IntRect dirtyRect = rect;
dirtyRect.move(renderer->borderLeft() + renderer->paddingLeft(),
renderer->borderTop() + renderer->paddingTop());
renderer->repaintRectangle(dirtyRect);
}
void WebPluginContainerImpl::setFocus(bool focused)
{
Widget::setFocus(focused);
m_webPlugin->updateFocus(focused);
}
void WebPluginContainerImpl::show()
{
setSelfVisible(true);
m_webPlugin->updateVisibility(true);
Widget::show();
}
void WebPluginContainerImpl::hide()
{
setSelfVisible(false);
m_webPlugin->updateVisibility(false);
Widget::hide();
}
void WebPluginContainerImpl::handleEvent(Event* event)
{
if (!m_webPlugin->acceptsInputEvents())
return;
RefPtr<WebPluginContainerImpl> protector(this);
// The events we pass are defined at:
// http://devedge-temp.mozilla.org/library/manuals/2002/plugin/1.0/structures5.html#1000000
// Don't take the documentation as truth, however. There are many cases
// where mozilla behaves differently than the spec.
if (event->isMouseEvent())
handleMouseEvent(static_cast<MouseEvent*>(event));
else if (event->isWheelEvent())
handleWheelEvent(static_cast<WheelEvent*>(event));
else if (event->isKeyboardEvent())
handleKeyboardEvent(static_cast<KeyboardEvent*>(event));
// FIXME: it would be cleaner if Widget::handleEvent returned true/false and
// HTMLPluginElement called setDefaultHandled or defaultEventHandler.
if (!event->defaultHandled())
m_element->Node::defaultEventHandler(event);
}
void WebPluginContainerImpl::frameRectsChanged()
{
Widget::frameRectsChanged();
reportGeometry();
}
void WebPluginContainerImpl::widgetPositionsUpdated()
{
Widget::widgetPositionsUpdated();
reportGeometry();
}
void WebPluginContainerImpl::setParentVisible(bool parentVisible)
{
// We override this function to make sure that geometry updates are sent
    // over to the plugin. For example, when a plugin is instantiated it does not
// have a valid parent. As a result the first geometry update from webkit
// is ignored. This function is called when the plugin eventually gets a
// parent.
if (isParentVisible() == parentVisible)
return; // No change.
Widget::setParentVisible(parentVisible);
if (!isSelfVisible())
        return; // This widget has explicitly been marked as not visible.
m_webPlugin->updateVisibility(isVisible());
}
void WebPluginContainerImpl::setParent(ScrollView* view)
{
// We override this function so that if the plugin is windowed, we can call
// NPP_SetWindow at the first possible moment. This ensures that
// NPP_SetWindow is called before the manual load data is sent to a plugin.
// If this order is reversed, Flash won't load videos.
Widget::setParent(view);
if (view)
reportGeometry();
}
bool WebPluginContainerImpl::supportsPaginatedPrint() const
{
return m_webPlugin->supportsPaginatedPrint();
}
int WebPluginContainerImpl::printBegin(const IntRect& printableArea,
int printerDPI) const
{
return m_webPlugin->printBegin(printableArea, printerDPI);
}
bool WebPluginContainerImpl::printPage(int pageNumber,
WebCore::GraphicsContext* gc)
{
gc->save();
#if WEBKIT_USING_SKIA
WebCanvas* canvas = gc->platformContext()->canvas();
#elif WEBKIT_USING_CG
WebCanvas* canvas = gc->platformContext();
#endif
bool ret = m_webPlugin->printPage(pageNumber, canvas);
gc->restore();
return ret;
}
void WebPluginContainerImpl::printEnd()
{
return m_webPlugin->printEnd();
}
void WebPluginContainerImpl::copy()
{
if (!m_webPlugin->hasSelection())
return;
webKitClient()->clipboard()->writeHTML(m_webPlugin->selectionAsMarkup(), WebURL(), m_webPlugin->selectionAsText(), false);
}
WebElement WebPluginContainerImpl::element()
{
return WebElement(m_element);
}
void WebPluginContainerImpl::invalidate()
{
Widget::invalidate();
}
void WebPluginContainerImpl::invalidateRect(const WebRect& rect)
{
invalidateRect(static_cast<IntRect>(rect));
}
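// Scrolls the plugin's rect via fast host-window scrolling when the parent
// frame view is not overlapped; otherwise falls back to invalidation.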
void WebPluginContainerImpl::scrollRect(int dx, int dy, const WebRect& rect)
{
Widget* parentWidget = parent();
if (parentWidget->isFrameView()) {
FrameView* parentFrameView = static_cast<FrameView*>(parentWidget);
if (!parentFrameView->isOverlapped()) {
IntRect damageRect = convertToContainingWindow(static_cast<IntRect>(rect));
IntSize scrollDelta(dx, dy);
// scroll() only uses the second rectangle, clipRect, and ignores the first
// rectangle.
parent()->hostWindow()->scroll(scrollDelta, damageRect, damageRect);
return;
}
}
// Use slow scrolling instead.
invalidateRect(rect);
}
void WebPluginContainerImpl::reportGeometry()
{
if (!parent())
return;
IntRect windowRect, clipRect;
Vector<IntRect> cutOutRects;
calculateGeometry(frameRect(), windowRect, clipRect, cutOutRects);
m_webPlugin->updateGeometry(windowRect, clipRect, cutOutRects, isVisible());
}
void WebPluginContainerImpl::setBackingTextureId(unsigned id)
{
#if USE(ACCELERATED_COMPOSITING)
unsigned currId = m_platformLayer->textureId();
if (currId == id)
return;
m_platformLayer->setTextureId(id);
    // If either of the IDs is zero we need to switch between hardware
// and software compositing. This is done by triggering a style recalc
// on the container element.
if (!(currId * id))
m_element->setNeedsStyleRecalc(WebCore::SyntheticStyleChange);
#endif
}
void WebPluginContainerImpl::commitBackingTexture()
{
#if USE(ACCELERATED_COMPOSITING)
if (platformLayer())
platformLayer()->setNeedsDisplay();
#endif
}
void WebPluginContainerImpl::clearScriptObjects()
{
Frame* frame = m_element->document()->frame();
if (!frame)
return;
frame->script()->cleanupScriptObjectsForPlugin(this);
}
NPObject* WebPluginContainerImpl::scriptableObjectForElement()
{
return m_element->getNPObject();
}
WebString WebPluginContainerImpl::executeScriptURL(const WebURL& url, bool popupsAllowed)
{
Frame* frame = m_element->document()->frame();
if (!frame)
return WebString();
const KURL& kurl = url;
ASSERT(kurl.protocolIs("javascript"));
String script = decodeURLEscapeSequences(
kurl.string().substring(strlen("javascript:")));
ScriptValue result = frame->script()->executeScript(script, popupsAllowed);
// Failure is reported as a null string.
String resultStr;
result.getString(resultStr);
return resultStr;
}
void WebPluginContainerImpl::loadFrameRequest(
const WebURLRequest& request, const WebString& target, bool notifyNeeded, void* notifyData)
{
Frame* frame = m_element->document()->frame();
if (!frame)
return; // FIXME: send a notification in this case?
if (notifyNeeded) {
// FIXME: This is a bit of hack to allow us to observe completion of
// our frame request. It would be better to evolve FrameLoader to
// support a completion callback instead.
WebPluginLoadObserver* observer =
new WebPluginLoadObserver(this, request.url(), notifyData);
m_pluginLoadObservers.append(observer);
WebDataSourceImpl::setNextPluginLoadObserver(observer);
}
FrameLoadRequest frameRequest(frame->document()->securityOrigin(),
request.toResourceRequest(), target);
UserGestureIndicator gestureIndicator(request.hasUserGesture() ?
DefinitelyProcessingUserGesture : DefinitelyNotProcessingUserGesture);
frame->loader()->loadFrameRequest(
frameRequest,
false, // lock history
false, // lock back forward list
0, // event
0, // form state
SendReferrer);
}
void WebPluginContainerImpl::zoomLevelChanged(double zoomLevel)
{
WebViewImpl* view = WebViewImpl::fromPage(m_element->document()->frame()->page());
view->fullFramePluginZoomLevelChanged(zoomLevel);
}
void WebPluginContainerImpl::didReceiveResponse(const ResourceResponse& response)
{
// Make sure that the plugin receives window geometry before data, or else
// plugins misbehave.
frameRectsChanged();
WrappedResourceResponse urlResponse(response);
m_webPlugin->didReceiveResponse(urlResponse);
}
void WebPluginContainerImpl::didReceiveData(const char *data, int dataLength)
{
m_webPlugin->didReceiveData(data, dataLength);
}
void WebPluginContainerImpl::didFinishLoading()
{
m_webPlugin->didFinishLoading();
}
void WebPluginContainerImpl::didFailLoading(const ResourceError& error)
{
m_webPlugin->didFailLoading(error);
}
NPObject* WebPluginContainerImpl::scriptableObject()
{
return m_webPlugin->scriptableObject();
}
void WebPluginContainerImpl::willDestroyPluginLoadObserver(WebPluginLoadObserver* observer)
{
size_t pos = m_pluginLoadObservers.find(observer);
if (pos == notFound)
return;
m_pluginLoadObservers.remove(pos);
}
#if USE(ACCELERATED_COMPOSITING)
WebCore::LayerChromium* WebPluginContainerImpl::platformLayer() const
{
return m_platformLayer->textureId() ? m_platformLayer.get() : 0;
}
#endif
// Private methods -------------------------------------------------------------
WebPluginContainerImpl::WebPluginContainerImpl(WebCore::HTMLPlugInElement* element, WebPlugin* webPlugin)
: WebCore::PluginViewBase(0)
, m_element(element)
, m_webPlugin(webPlugin)
#if USE(ACCELERATED_COMPOSITING)
, m_platformLayer(PluginLayerChromium::create(0))
#endif
{
}
WebPluginContainerImpl::~WebPluginContainerImpl()
{
for (size_t i = 0; i < m_pluginLoadObservers.size(); ++i)
m_pluginLoadObservers[i]->clearPluginContainer();
m_webPlugin->destroy();
}
void WebPluginContainerImpl::handleMouseEvent(MouseEvent* event)
{
ASSERT(parent()->isFrameView());
// We cache the parent FrameView here as the plugin widget could be deleted
// in the call to HandleEvent. See http://b/issue?id=1362948
FrameView* parentView = static_cast<FrameView*>(parent());
WebMouseEventBuilder webEvent(this, *event);
if (webEvent.type == WebInputEvent::Undefined)
return;
if (event->type() == eventNames().mousedownEvent) {
Frame* containingFrame = parentView->frame();
if (Page* currentPage = containingFrame->page())
currentPage->focusController()->setFocusedNode(m_element, containingFrame);
else
containingFrame->document()->setFocusedNode(m_element);
}
WebCursorInfo cursorInfo;
if (m_webPlugin->handleInputEvent(webEvent, cursorInfo))
event->setDefaultHandled();
// A windowless plugin can change the cursor in response to a mouse move
// event. We need to reflect the changed cursor in the frame view as the
// mouse is moved in the boundaries of the windowless plugin.
Page* page = parentView->frame()->page();
if (!page)
return;
ChromeClientImpl* chromeClient =
static_cast<ChromeClientImpl*>(page->chrome()->client());
chromeClient->setCursorForPlugin(cursorInfo);
}
void WebPluginContainerImpl::handleWheelEvent(WheelEvent* event)
{
WebMouseWheelEventBuilder webEvent(this, *event);
if (webEvent.type == WebInputEvent::Undefined)
return;
WebCursorInfo cursorInfo;
if (m_webPlugin->handleInputEvent(webEvent, cursorInfo))
event->setDefaultHandled();
}
void WebPluginContainerImpl::handleKeyboardEvent(KeyboardEvent* event)
{
WebKeyboardEventBuilder webEvent(*event);
if (webEvent.type == WebInputEvent::Undefined)
return;
if (webEvent.type == WebInputEvent::KeyDown) {
#if defined(OS_MACOSX)
if (webEvent.modifiers == WebInputEvent::MetaKey
#else
if (webEvent.modifiers == WebInputEvent::ControlKey
#endif
&& webEvent.windowsKeyCode == VKEY_C
// Only copy if there's a selection, so that we only ever do this
// for Pepper plugins that support copying. Windowless NPAPI
// plugins will get the event as before.
&& m_webPlugin->hasSelection()) {
copy();
event->setDefaultHandled();
return;
}
}
const WebInputEvent* currentInputEvent = WebViewImpl::currentInputEvent();
    // Copy stashed info over, and only copy here in order not to interfere with
// the ctrl-c logic above.
if (currentInputEvent
&& WebInputEvent::isKeyboardEventType(currentInputEvent->type)) {
webEvent.modifiers |= currentInputEvent->modifiers &
(WebInputEvent::CapsLockOn | WebInputEvent::NumLockOn);
}
WebCursorInfo cursorInfo;
if (m_webPlugin->handleInputEvent(webEvent, cursorInfo))
event->setDefaultHandled();
}
void WebPluginContainerImpl::calculateGeometry(const IntRect& frameRect,
IntRect& windowRect,
IntRect& clipRect,
Vector<IntRect>& cutOutRects)
{
windowRect = IntRect(
parent()->contentsToWindow(frameRect.location()), frameRect.size());
// Calculate a clip-rect so that we don't overlap the scrollbars, etc.
clipRect = windowClipRect();
clipRect.move(-windowRect.x(), -windowRect.y());
getPluginOcclusions(m_element, this->parent(), frameRect, cutOutRects);
// Convert to the plugin position.
for (size_t i = 0; i < cutOutRects.size(); i++)
cutOutRects[i].move(-frameRect.x(), -frameRect.y());
}
WebCore::IntRect WebPluginContainerImpl::windowClipRect() const
{
// Start by clipping to our bounds.
IntRect clipRect =
convertToContainingWindow(IntRect(0, 0, width(), height()));
// document()->renderer() can be 0 when we receive messages from the
// plugins while we are destroying a frame.
if (m_element->renderer()->document()->renderer()) {
// Take our element and get the clip rect from the enclosing layer and
// frame view.
RenderLayer* layer = m_element->renderer()->enclosingLayer();
clipRect.intersect(
m_element->document()->view()->windowClipRectForLayer(layer, true));
}
return clipRect;
}
} // namespace WebKit
| {
"content_hash": "461a2ee90b945c2a4032c3b8d0f0505a",
"timestamp": "",
"source": "github",
"line_count": 563,
"max_line_length": 126,
"avg_line_length": 30.209591474245116,
"alnum_prop": 0.6867944496707432,
"repo_name": "mogoweb/webkit_for_android5.1",
"id": "150f1739c2868ef7d83aca4bded94b0152084d23",
"size": "18570",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "webkit/Source/WebKit/chromium/src/WebPluginContainerImpl.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AppleScript",
"bytes": "6772"
},
{
"name": "Assembly",
"bytes": "26025"
},
{
"name": "Awk",
"bytes": "2800"
},
{
"name": "Batchfile",
"bytes": "57337"
},
{
"name": "C",
"bytes": "7713030"
},
{
"name": "C++",
"bytes": "153178707"
},
{
"name": "CMake",
"bytes": "192330"
},
{
"name": "CSS",
"bytes": "483041"
},
{
"name": "Common Lisp",
"bytes": "9920"
},
{
"name": "DIGITAL Command Language",
"bytes": "5243"
},
{
"name": "DTrace",
"bytes": "1931"
},
{
"name": "Go",
"bytes": "3744"
},
{
"name": "HTML",
"bytes": "14998422"
},
{
"name": "Java",
"bytes": "1522083"
},
{
"name": "JavaScript",
"bytes": "18008829"
},
{
"name": "Lex",
"bytes": "42554"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "M4",
"bytes": "49839"
},
{
"name": "Makefile",
"bytes": "476166"
},
{
"name": "Module Management System",
"bytes": "9756"
},
{
"name": "Objective-C",
"bytes": "2798053"
},
{
"name": "Objective-C++",
"bytes": "7846322"
},
{
"name": "PHP",
"bytes": "66595"
},
{
"name": "Perl",
"bytes": "1130475"
},
{
"name": "Perl 6",
"bytes": "445215"
},
{
"name": "Python",
"bytes": "5503045"
},
{
"name": "QML",
"bytes": "3331"
},
{
"name": "QMake",
"bytes": "294800"
},
{
"name": "R",
"bytes": "290"
},
{
"name": "Roff",
"bytes": "273562"
},
{
"name": "Ruby",
"bytes": "81928"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "488223"
},
{
"name": "Yacc",
"bytes": "153801"
},
{
"name": "xBase",
"bytes": "328"
}
],
"symlink_target": ""
} |
@interface DropDownViewController ()<DOPDropDownMenuDataSource,DOPDropDownMenuDelegate>
@property (nonatomic, strong) NSArray *classifys;
@property (nonatomic, strong) NSArray *cates;
@property (nonatomic, strong) NSArray *movices;
@property (nonatomic, strong) NSArray *hostels;
@property (nonatomic, strong) NSArray *areas;
@property (nonatomic, strong) NSArray *sorts;
@property (nonatomic, weak) DOPDropDownMenu *menu;
@property (nonatomic, weak) DOPDropDownMenu *menuB;
@end
@implementation DropDownViewController
- (void)viewDidLoad {
[super viewDidLoad];
self.title = @"DOPDropDownMenu";
UIButton *selectButton = [[UIButton alloc] initWithFrame:CGRectMake(20, 340, 300, 40)];
    [selectButton setTitle:@"Select position 0-2-2" forState:UIControlStateNormal];
[selectButton setTitleColor:[UIColor orangeColor] forState:UIControlStateNormal];
[self.view addSubview:selectButton];
[selectButton addTarget:self action:@selector(selectIndexPathAction:) forControlEvents:UIControlEventTouchUpInside];
    self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc]initWithTitle:@"Reload" style:UIBarButtonItemStylePlain target:self action:@selector(menuReloadData)];
    // Data
    self.classifys = @[@"Food",@"Today's New Deals",@"Movies",@"Hotels"];
    self.cates = @[@"Buffet",@"Fast Food",@"Hot Pot",@"Japanese & Korean",@"Western",@"BBQ & Snacks"];
    self.movices = @[@"Mainland Dramas",@"HK & Taiwan Dramas",@"UK & US Dramas"];
    self.hostels = @[@"Budget Hotels",@"Business Hotels",@"Chain Hotels",@"Resort Hotels",@"Apartment Hotels"];
    self.areas = @[@"All Areas",@"Furong District",@"Yuhua District",@"Tianxin District",@"Kaifu District",@"Yuelu District"];
    self.sorts = @[@"Default Sort",@"Nearest to Me",@"Top Rated First",@"Most Popular First",@"Newest First"];
    // Add the drop-down menu
DOPDropDownMenu *menu = [[DOPDropDownMenu alloc] initWithOrigin:CGPointMake(0, 64) andHeight:44];
menu.delegate = self;
menu.dataSource = self;
[self.view addSubview:menu];
_menu = menu;
    // Callback invoked when the drop-down menu collapses; use it to request fresh data from the network.
    _menu.finishedBlock = ^(DOPIndexPath *indexPath) {
if (indexPath.item >= 0) {
NSLog(@"收起:点击了 %ld - %ld - %ld 项目",indexPath.column,indexPath.row,indexPath.item);
}else {
NSLog(@"收起:点击了 %ld - %ld 项目",indexPath.column,indexPath.row);
}
};
    // The first display after the menu is created does not invoke the selection delegate; use this to trigger it manually.
// [menu selectDefalutIndexPath];
[menu selectIndexPath:[DOPIndexPath indexPathWithCol:0 row:0 item:0]];
}
- (void)menuReloadData
{
    self.classifys = @[@"Food",@"Today's New Deals",@"Movies"];
[_menu reloadData];
}
- (void)selectIndexPathAction:(id)sender {
[_menu selectIndexPath:[DOPIndexPath indexPathWithCol:0 row:2 item:2]];
}
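// DOPDropDownMenu data source: three columns (category with nested items, area, sort order).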
- (NSInteger)numberOfColumnsInMenu:(DOPDropDownMenu *)menu
{
return 3;
}
- (NSInteger)menu:(DOPDropDownMenu *)menu numberOfRowsInColumn:(NSInteger)column
{
if (column == 0) {
return self.classifys.count;
}else if (column == 1){
return self.areas.count;
}else {
return self.sorts.count;
}
}
- (NSString *)menu:(DOPDropDownMenu *)menu titleForRowAtIndexPath:(DOPIndexPath *)indexPath
{
if (indexPath.column == 0) {
return self.classifys[indexPath.row];
} else if (indexPath.column == 1){
return self.areas[indexPath.row];
} else {
return self.sorts[indexPath.row];
}
}
// new datasource
- (NSString *)menu:(DOPDropDownMenu *)menu imageNameForRowAtIndexPath:(DOPIndexPath *)indexPath
{
if (indexPath.column == 0 || indexPath.column == 1) {
return [NSString stringWithFormat:@"ic_filter_category_%ld",indexPath.row];
}
return nil;
}
- (NSString *)menu:(DOPDropDownMenu *)menu imageNameForItemsInRowAtIndexPath:(DOPIndexPath *)indexPath
{
if (indexPath.column == 0 && indexPath.item >= 0) {
return [NSString stringWithFormat:@"ic_filter_category_%ld",indexPath.item];
}
return nil;
}
// new datasource
- (NSString *)menu:(DOPDropDownMenu *)menu detailTextForRowAtIndexPath:(DOPIndexPath *)indexPath
{
if (indexPath.column < 2) {
return [@(arc4random()%1000) stringValue];
}
return nil;
}
- (NSString *)menu:(DOPDropDownMenu *)menu detailTextForItemsInRowAtIndexPath:(DOPIndexPath *)indexPath
{
return [@(arc4random()%1000) stringValue];
}
- (NSInteger)menu:(DOPDropDownMenu *)menu numberOfItemsInRow:(NSInteger)row column:(NSInteger)column
{
if (column == 0) {
if (row == 0) {
return self.cates.count;
} else if (row == 2){
return self.movices.count;
} else if (row == 3){
return self.hostels.count;
}
}
return 0;
}
- (NSString *)menu:(DOPDropDownMenu *)menu titleForItemsInRowAtIndexPath:(DOPIndexPath *)indexPath
{
if (indexPath.column == 0) {
if (indexPath.row == 0) {
return self.cates[indexPath.item];
} else if (indexPath.row == 2){
return self.movices[indexPath.item];
} else if (indexPath.row == 3){
return self.hostels[indexPath.item];
}
}
return nil;
}
- (void)menu:(DOPDropDownMenu *)menu didSelectRowAtIndexPath:(DOPIndexPath *)indexPath
{
if (indexPath.item >= 0) {
NSLog(@"点击了 %ld - %ld - %ld 项目",indexPath.column,indexPath.row,indexPath.item);
}else {
NSLog(@"点击了 %ld - %ld 项目",indexPath.column,indexPath.row);
}
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
| {
"content_hash": "4eadbed14be3e00613e9f9b489b08c1a",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 169,
"avg_line_length": 31.55952380952381,
"alnum_prop": 0.6537155790267823,
"repo_name": "lifei321/MyDemo",
"id": "66ecc4fe74a9e221aaa64aa815b21b34b399b707",
"size": "5892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MyDemo/ChildController/DropDownList(下拉列表)/second/DropDownViewController.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "1217789"
},
{
"name": "Ruby",
"bytes": "137"
}
],
"symlink_target": ""
} |
module Import
( module Import
) where
import Data.Time as Import (getCurrentTime)
import Prelude as Import hiding (head, init, last,
readFile, tail, writeFile)
import Yesod as Import hiding (Route (..))
import Control.Applicative as Import (pure, (<$>), (<*>))
import Data.Text as Import (Text)
import Foundation as Import
import Model as Import
import Settings as Import
import Settings.Development as Import
import Settings.StaticFiles as Import
#if __GLASGOW_HASKELL__ >= 704
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat),
(<>))
#else
import Data.Monoid as Import
(Monoid (mappend, mempty, mconcat))
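-- (<>) is only exported from Data.Monoid since base 4.5 (GHC 7.4), so define
-- it locally for older compilers.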
infixr 5 <>
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
| {
"content_hash": "49e5af3fb02e51a31e13423f60814ab2",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 84,
"avg_line_length": 37.4,
"alnum_prop": 0.44563279857397503,
"repo_name": "ScaledSoftware/writemealist",
"id": "4bc5a3b69837553d9a901445fdf1357658b7306a",
"size": "1122",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Import.hs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "100742"
},
{
"name": "Haskell",
"bytes": "34327"
},
{
"name": "Shell",
"bytes": "1291"
}
],
"symlink_target": ""
} |
using System;
using System.Runtime.InteropServices;
namespace System.Reflection
{
// Summary:
// Represents a clause in a structured exception-handling block.
public class ExceptionHandlingClause
{
protected ExceptionHandlingClause() { }
// Summary:
// Gets the type of exception handled by this clause.
//
// Returns:
// A System.Type object that represents that type of exception handled by this
// clause, or null if the System.Reflection.ExceptionHandlingClause.Flags property
// is System.Reflection.ExceptionHandlingClauseOptions.Filter or System.Reflection.ExceptionHandlingClauseOptions.Finally.
//
// Exceptions:
// System.InvalidOperationException:
// Invalid use of property for the object's current state.
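    // Note: the members below are declared virtual only when targeting
    // frameworks newer than .NET 3.5, matching those reference assemblies.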
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public Type CatchType { get; }
//
// Summary:
// Gets the offset within the method body, in bytes, of the user-supplied filter
// code.
//
// Returns:
// The offset within the method body, in bytes, of the user-supplied filter
// code. The value of this property has no meaning if the System.Reflection.ExceptionHandlingClause.Flags
// property has any value other than System.Reflection.ExceptionHandlingClauseOptions.Filter.
//
// Exceptions:
// System.InvalidOperationException:
// Cannot get the offset because the exception handling clause is not a filter.
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public int FilterOffset { get; }
//
// Summary:
// Gets a value indicating whether this exception-handling clause is a finally
// clause, a type-filtered clause, or a user-filtered clause.
//
// Returns:
// An System.Reflection.ExceptionHandlingClauseOptions value that indicates
// what kind of action this clause performs.
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public ExceptionHandlingClauseOptions Flags { get; }
//
// Summary:
// Gets the length, in bytes, of the body of this exception-handling clause.
//
// Returns:
// An integer that represents the length, in bytes, of the MSIL that forms the
// body of this exception-handling clause.
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public int HandlerLength { get; }
//
// Summary:
// Gets the offset within the method body, in bytes, of this exception-handling
// clause.
//
// Returns:
// An integer that represents the offset within the method body, in bytes, of
// this exception-handling clause.
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public int HandlerOffset { get; }
//
// Summary:
// The total length, in bytes, of the try block that includes this exception-handling
// clause.
//
// Returns:
// The total length, in bytes, of the try block that includes this exception-handling
// clause.
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public int TryLength { get; }
//
// Summary:
// The offset within the method, in bytes, of the try block that includes this
// exception-handling clause.
//
// Returns:
// An integer that represents the offset within the method, in bytes, of the
// try block that includes this exception-handling clause.
#if NETFRAMEWORK_3_5
#else
virtual
#endif
extern public int TryOffset { get; }
}
}
#endif
| {
"content_hash": "a285ea681b390dda78a8dbbacc5e80ca",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 130,
"avg_line_length": 31.18421052631579,
"alnum_prop": 0.6635724331926863,
"repo_name": "SergeyTeplyakov/CodeContracts",
"id": "876e26d1c14db1fa2bb244ebeec3031df4c88d5d",
"size": "4743",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "Microsoft.Research/Contracts/MsCorlib/System.Reflection.ExceptionHandlingClause.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "2883"
},
{
"name": "Batchfile",
"bytes": "26723"
},
{
"name": "C",
"bytes": "232630"
},
{
"name": "C#",
"bytes": "53485173"
},
{
"name": "C++",
"bytes": "504486"
},
{
"name": "F#",
"bytes": "418"
},
{
"name": "HTML",
"bytes": "2819"
},
{
"name": "JavaScript",
"bytes": "1544"
},
{
"name": "Makefile",
"bytes": "8484"
},
{
"name": "Perl",
"bytes": "8834"
},
{
"name": "PostScript",
"bytes": "1364"
},
{
"name": "PowerShell",
"bytes": "3542"
},
{
"name": "Python",
"bytes": "1906"
},
{
"name": "TeX",
"bytes": "61151"
},
{
"name": "Visual Basic",
"bytes": "245561"
},
{
"name": "XSLT",
"bytes": "102883"
}
],
"symlink_target": ""
} |
package com.gemstone.gemfire.internal.admin;
//import java.util.List;
/**
* Interface to represent one statistic resource
*
* @author Darrel Schneider
* @author Kirk Lund
*/
public interface StatResource extends GfObject {
public long getResourceID();
public long getResourceUniqueID();
public String getSystemName();
public GemFireVM getGemFireVM();
public Stat[] getStats();
public Stat getStatByName(String name);
public String getName();
/**
* @return the full description of this statistic resource
*/
public String getDescription();
}
| {
"content_hash": "4e4604dfa0c10860f4469fb19ce7682d",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 60,
"avg_line_length": 21.48148148148148,
"alnum_prop": 0.7241379310344828,
"repo_name": "zuowang/incubator-geode",
"id": "01a9ca3d6d7f3230cc668926ee7d2a7cc6939603",
"size": "995",
"binary": false,
"copies": "11",
"ref": "refs/heads/develop",
"path": "gemfire-core/src/main/java/com/gemstone/gemfire/internal/admin/StatResource.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1776"
},
{
"name": "CSS",
"bytes": "54202"
},
{
"name": "Groovy",
"bytes": "4066"
},
{
"name": "HTML",
"bytes": "124114"
},
{
"name": "Java",
"bytes": "25721529"
},
{
"name": "JavaScript",
"bytes": "315581"
},
{
"name": "Scala",
"bytes": "192735"
},
{
"name": "Shell",
"bytes": "7239"
}
],
"symlink_target": ""
} |
package com.intellij.codeInspection.bytecodeAnalysis;
import com.intellij.openapi.util.text.StringUtil;
import one.util.streamex.StreamEx;
import org.jetbrains.org.objectweb.asm.Handle;
import org.jetbrains.org.objectweb.asm.Type;
import org.jetbrains.org.objectweb.asm.tree.InvokeDynamicInsnNode;
import org.jetbrains.org.objectweb.asm.tree.analysis.BasicValue;
import java.util.List;
import java.util.function.Function;
import static org.jetbrains.org.objectweb.asm.Opcodes.*;
final class LambdaIndy {
private static final String LAMBDA_METAFACTORY_CLASS = "java/lang/invoke/LambdaMetafactory";
private static final String LAMBDA_METAFACTORY_METHOD = "metafactory";
private static final String LAMBDA_METAFACTORY_DESCRIPTOR =
"(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;";
private final int myTag;
private final Type myFunctionalMethodType;
private final Member myMethod;
private final Type myFunctionalInterfaceType;
private LambdaIndy(int tag, Type functionalMethodType, Member lambdaMethod, Type functionalInterfaceType) {
myTag = tag;
myFunctionalMethodType = functionalMethodType;
myMethod = lambdaMethod;
myFunctionalInterfaceType = functionalInterfaceType;
}
public int getTag() {
return myTag;
}
/**
* @return an opcode which corresponds to target method handle or -1 if method handle tag has no corresponding opcode
*/
public int getAssociatedOpcode() {
switch (myTag) {
case H_INVOKESTATIC:
return INVOKESTATIC;
case H_INVOKESPECIAL:
return INVOKESPECIAL;
case H_INVOKEINTERFACE:
return INVOKEINTERFACE;
case H_INVOKEVIRTUAL:
return INVOKEVIRTUAL;
}
return -1;
}
public Type getFunctionalMethodType() {
return myFunctionalMethodType;
}
public Member getMethod() {
return myMethod;
}
public Type getFunctionalInterfaceType() {
return myFunctionalInterfaceType;
}
/**
* Creates list of argument values which should be passed to lambda runtime representation method
*
* @param captured list of captured arguments
* @param valueSupplier function to create new values by type
* @return list of lambda argument values
*/
List<BasicValue> getLambdaMethodArguments(List<? extends BasicValue> captured, Function<Type, BasicValue> valueSupplier) {
// Lambda runtime representation args consist of captured values and invocation values
// E.g.:
// IntUnaryOperator getAdder(int addend) { return x -> addend + x; }
// will generate
// static int lambda$getAdder$0(int addend, int x) {return addend + x;}
return StreamEx.of(getFunctionalMethodType().getArgumentTypes()).map(valueSupplier).prepend(captured).toList();
}
public String toString() {
return "Lambda [" + myMethod.methodName + "]: " + StringUtil.getShortName(myFunctionalInterfaceType.getClassName());
}
static LambdaIndy from(InvokeDynamicInsnNode indyNode) {
Handle bsm = indyNode.bsm;
if(LAMBDA_METAFACTORY_CLASS.equals(bsm.getOwner()) &&
LAMBDA_METAFACTORY_METHOD.equals(bsm.getName()) &&
LAMBDA_METAFACTORY_DESCRIPTOR.equals(bsm.getDesc()) &&
indyNode.bsmArgs.length >= 3 && indyNode.bsmArgs[1] instanceof Handle && indyNode.bsmArgs[2] instanceof Type) {
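      // LambdaMetafactory.metafactory bootstrap args: [0] SAM method type,
      // [1] implementation method handle, [2] instantiated method type.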
Handle lambdaBody = (Handle)indyNode.bsmArgs[1];
Type targetType = (Type)indyNode.bsmArgs[2];
Member targetMethod = new Member(lambdaBody.getOwner(), lambdaBody.getName(), lambdaBody.getDesc());
Type retType = Type.getReturnType(indyNode.desc);
return new LambdaIndy(lambdaBody.getTag(), targetType, targetMethod, retType);
}
return null;
}
}
| {
"content_hash": "106ed5a4777fe53210d092d9516e314d",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 211,
"avg_line_length": 38.15,
"alnum_prop": 0.7397116644823066,
"repo_name": "mglukhikh/intellij-community",
"id": "ae6b16af7d6a9c7138963c9ae90956130b2118fb",
"size": "4415",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "java/java-analysis-impl/src/com/intellij/codeInspection/bytecodeAnalysis/LambdaIndy.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "20665"
},
{
"name": "AspectJ",
"bytes": "182"
},
{
"name": "Batchfile",
"bytes": "60827"
},
{
"name": "C",
"bytes": "211435"
},
{
"name": "C#",
"bytes": "1264"
},
{
"name": "C++",
"bytes": "197674"
},
{
"name": "CMake",
"bytes": "1675"
},
{
"name": "CSS",
"bytes": "201445"
},
{
"name": "CoffeeScript",
"bytes": "1759"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "Groovy",
"bytes": "3243028"
},
{
"name": "HLSL",
"bytes": "57"
},
{
"name": "HTML",
"bytes": "1899088"
},
{
"name": "J",
"bytes": "5050"
},
{
"name": "Java",
"bytes": "165554704"
},
{
"name": "JavaScript",
"bytes": "570364"
},
{
"name": "Jupyter Notebook",
"bytes": "93222"
},
{
"name": "Kotlin",
"bytes": "4611299"
},
{
"name": "Lex",
"bytes": "147047"
},
{
"name": "Makefile",
"bytes": "2352"
},
{
"name": "NSIS",
"bytes": "51276"
},
{
"name": "Objective-C",
"bytes": "27861"
},
{
"name": "Perl",
"bytes": "903"
},
{
"name": "Perl 6",
"bytes": "26"
},
{
"name": "Protocol Buffer",
"bytes": "6680"
},
{
"name": "Python",
"bytes": "25439881"
},
{
"name": "Roff",
"bytes": "37534"
},
{
"name": "Ruby",
"bytes": "1217"
},
{
"name": "Scala",
"bytes": "11698"
},
{
"name": "Shell",
"bytes": "66341"
},
{
"name": "Smalltalk",
"bytes": "338"
},
{
"name": "TeX",
"bytes": "25473"
},
{
"name": "Thrift",
"bytes": "1846"
},
{
"name": "TypeScript",
"bytes": "9469"
},
{
"name": "Visual Basic",
"bytes": "77"
},
{
"name": "XSLT",
"bytes": "113040"
}
],
"symlink_target": ""
} |
package com.akjava.gwt.uvexport.client;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import com.akjava.gwt.html5.client.file.File;
import com.akjava.gwt.html5.client.file.FileUploadForm;
import com.akjava.gwt.html5.client.file.FileUtils;
import com.akjava.gwt.html5.client.file.FileUtils.DataURLListener;
import com.akjava.gwt.lib.client.CanvasUtils;
import com.akjava.gwt.lib.client.LogUtils;
import com.akjava.gwt.three.client.js.THREE;
import com.akjava.gwt.three.client.js.core.Geometry;
import com.akjava.gwt.three.client.js.loaders.JSONLoader.JSONLoadHandler;
import com.akjava.gwt.three.client.js.materials.Material;
import com.akjava.gwt.three.client.js.materials.MeshPhongMaterial;
import com.akjava.gwt.three.client.js.math.Color;
import com.akjava.gwt.three.client.js.math.Vector2;
import com.akjava.lib.common.utils.ColorUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.gwt.canvas.client.Canvas;
import com.google.gwt.canvas.dom.client.Context2d;
import com.google.gwt.canvas.dom.client.Context2d.LineCap;
import com.google.gwt.canvas.dom.client.Context2d.LineJoin;
import com.google.gwt.canvas.dom.client.ImageData;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.text.shared.Renderer;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.ValueListBox;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
/**
* Entry point classes define <code>onModuleLoad()</code>.
*/
public class GWTUVExport implements EntryPoint {
private Canvas canvas;
private double size;
private VerticalPanel container;
private Label messageLabel;
private double strokeSize=1;
/**
* This is the entry point method.
*/
public void onModuleLoad() {
VerticalPanel root=new VerticalPanel();
RootPanel.get().add(root);
HorizontalPanel panel=new HorizontalPanel();
panel.setVerticalAlignment(HorizontalPanel.ALIGN_MIDDLE);
root.add(panel);
FileUploadForm fileUpload=FileUtils.createSingleFileUploadForm(new DataURLListener() {
@Override
public void uploaded(File file, String text) {
loadJson(text);
}
}).setAccept(".json").insertTo(panel);
panel.add(new Label("size:"));
ValueListBox<Integer> sizeListBox=new ValueListBox<Integer>(new Renderer<Integer>() {
@Override
public String render(Integer object) {
if(object!=null){
return String.valueOf(object);
}
return null;
}
@Override
public void render(Integer object, Appendable appendable) throws IOException {
// TODO Auto-generated method stub
}
});
sizeListBox.setValue(1024);
sizeListBox.setAcceptableValues(Lists.newArrayList(256,512,1024,2048,4096,8192));
sizeListBox.addValueChangeHandler(new ValueChangeHandler<Integer>() {
@Override
public void onValueChange(ValueChangeEvent<Integer> event) {
size=event.getValue();
CanvasUtils.setSize(canvas, (int)size, (int)size);
if(lastGeometry!=null){
drawUV(lastGeometry,lastMaterials);
}
}
});
panel.add(sizeListBox);
List<Double> sizes=Lists.newArrayList();
for(double i=1;i<=32;i+=1){
sizes.add(i);
}
panel.add(new Label("stroke-size:"));
ValueListBox<Double> storokeSizeListBox=new ValueListBox<Double>(new Renderer<Double>() {
@Override
public String render(Double object) {
if(object!=null){
return String.valueOf(object);
}
return null;
}
@Override
public void render(Double object, Appendable appendable) throws IOException {
}
});
		storokeSizeListBox.setValue(1.0); // widened strokes compensate for the removed antialiasing
storokeSizeListBox.setAcceptableValues(sizes);
storokeSizeListBox.addValueChangeHandler(new ValueChangeHandler<Double>() {
@Override
public void onValueChange(ValueChangeEvent<Double> event) {
strokeSize=event.getValue();
if(lastGeometry!=null){
CanvasUtils.clear(canvas);
drawUV(lastGeometry,lastMaterials);
}
}
});
panel.add(storokeSizeListBox);
messageLabel = new Label();
root.add(messageLabel);
container = new VerticalPanel();
container.add(createCanvas());
root.add(container);
//TODO mix option and set that default
/* for test
Scheduler.get().scheduleDeferred(new ScheduledCommand() {
@Override
public void execute() {
loadJson("white.json");
}
});
*/
}
private Geometry lastGeometry;
private JsArray<Material> lastMaterials;
public void loadJson(String url){
CanvasUtils.clear(canvas);
THREE.JSONLoader().load(url, new JSONLoadHandler() {
@Override
public void loaded(Geometry geometry, JsArray<Material> materials) {
lastGeometry=geometry;
lastMaterials=materials;
drawUV(geometry,materials);
}
});
}
private Widget createCanvas() {
size = 1024*1;
canvas = CanvasUtils.createCanvas(size, size);
return canvas;
}
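	// Map normalized UV coordinates (origin at bottom-left) to canvas pixels
	// (origin at top-left); hence the 1.0 - y flip in toY.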
public double toX(double x){
return size*x;
}
public double toY(double y){
return size*(1.0-y);
}
private void drawUV(Geometry geometry,final @Nullable JsArray<Material> materials) {
drawUV(geometry,materials,strokeSize);
drawUV(geometry,materials,1);
/*
if(materials!=null){
Scheduler.get().scheduleEntry(new ScheduledCommand() {
@Override
public void execute() {
fixColors(materials);
LogUtils.log("fix");
}
});
}
*/
}
private int[] materialToRgb(Material material){
int[] result=new int[3];
Color color=material.gwtGetColor();
result[0]=(int) (color.getR()*255);
result[1]=(int) (color.getG()*255);
result[2]=(int) (color.getB()*255);
return result;
}
private int[] findClosest(int r,int g,int b,List<int[]> colors){
int[] result=colors.get(0);
		double[] luv=LuvUtils.toLab(r, g, b);
double l=luv[0];
double u=luv[1];
double v=luv[2];
int[] rgb=colors.get(0);
double[] color=toLuv(colors.get(0));
double minlength=ColorUtils.getColorLength(l, u, v, color[0], color[1], color[2]);
for(int i=1;i<colors.size();i++){
rgb=colors.get(i);
color=toLuv(colors.get(i));
double length=ColorUtils.getColorLength(l, u, v, color[0], color[1], color[2]);
if(length<minlength){
minlength=length;
result=rgb;
}
}
return result;
}
Map<String,double[]> luvMap=Maps.newHashMap();
private double[] toLuv(int[] rgb) {
		String key=rgb[0]+","+rgb[1]+","+rgb[2];
		double[] result=luvMap.get(key);
		if(result==null){
			result=LuvUtils.toLab(rgb[0], rgb[1], rgb[2]);
			luvMap.put(key, result);
		}
		return result;
}
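	/**
	 * Snaps semi-transparent (antialiased) pixels to fully opaque or fully
	 * transparent; the commented-out block below would additionally remap each
	 * pixel to the closest material color.
	 */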
private void fixColors(JsArray<Material> materials) {
//convert color
List<int[]> colors=Lists.newArrayList();
for(int i=0;i<materials.length();i++){
colors.add(materialToRgb(materials.get(i)));
}
ImageData imageData=CanvasUtils.getImageData(canvas);
for(int x=0;x<imageData.getWidth();x++){
for(int y=0;y<imageData.getHeight();y++){
//kill antialiase
int alpha=imageData.getAlphaAt(x, y);
if(alpha!=255 && alpha!=0){
if(alpha<128){
imageData.setAlphaAt(0, x, y);
alpha=0;
}else{
imageData.setAlphaAt(255, x, y);
}
}
if(alpha==0){
continue;
}
/*
int red=imageData.getRedAt(x, y);
int green=imageData.getGreenAt(x, y);
int blue=imageData.getBlueAt(x, y);
int[] rgb=findClosest(red,green,blue,colors);
if(red!=rgb[0]){
imageData.setRedAt(rgb[0], x, y);
}
if(green!=rgb[1]){
imageData.setGreenAt(rgb[1], x, y);
}
if(blue!=rgb[2]){
imageData.setBlueAt(rgb[2], x, y);
}
*/
}
}
canvas.getContext2d().putImageData(imageData, 0, 0);
}
private void drawUV(Geometry geometry,@Nullable JsArray<Material> materials,double storokeSize) {
//TODO support null;
Context2d context=canvas.getContext2d();
//LogUtils.log(geometry.getFaces().length());
canvas.getContext2d().save();
context.setLineCap(LineCap.ROUND);
context.setLineJoin(LineJoin.ROUND);
		context.setLineWidth(storokeSize); // stroke width works around seams at face edges
//LogUtils.log(geometry.getFaceVertexUvs().length());
for(int uvAt=0;uvAt<geometry.getFaceVertexUvs().length();uvAt++){//usually single
JsArray<JsArray<Vector2>> arrays=geometry.getFaceVertexUvs().get(uvAt);
LogUtils.log("faces:"+arrays.length());
for(int faceAt=0;faceAt<arrays.length();faceAt++){//same as face number
JsArray<Vector2> array=arrays.get(faceAt);
Vector2 last=array.get(array.length()-1);
context.beginPath();
//context.moveTo(toX(last.getX()),toY(last.getY()));
if(array.length()!=3){
LogUtils.log("uniq face number:"+array.length());
}
for(int j=0;j<array.length();j++){//usually j is 3 for each face
Vector2 vec2=array.get(j);
if(j==0){
context.moveTo(toX(vec2.getX()),toY(vec2.getY()));
}else{
context.lineTo(toX(vec2.getX()),toY(vec2.getY()));
}
//LogUtils.log(toX(vec2.getX())+","+toY(vec2.getY()));
}
context.closePath();//line to first one
if(materials!=null){
int index=geometry.getFaces().get(faceAt).getMaterialIndex();
MeshPhongMaterial phong=materials.get(index).cast();
String hex=phong.getColor().getHexString();
context.setFillStyle("#"+hex);
context.setStrokeStyle("#"+hex);
if(phong.isTransparent()){
context.setGlobalAlpha(phong.getOpacity());
}else{
context.setGlobalAlpha(1);
}
context.fill();
messageLabel.setText("");
}else{
messageLabel.setText("Not contain material/Exported without check Face Materials");
//no material
context.setStrokeStyle("#"+000);
}
context.stroke();
}
}
context.setGlobalAlpha(1);//can restore?
canvas.getContext2d().restore();
LogUtils.log("draw");
}
}
| {
"content_hash": "5085d735dd3cc26e0a67b4adfaf4c8e7",
"timestamp": "",
"source": "github",
"line_count": 386,
"max_line_length": 98,
"avg_line_length": 27.621761658031087,
"alnum_prop": 0.6659163383980492,
"repo_name": "akjava/GWTUVExport",
"id": "387a3c200d2e3b5a1b9f05d08dcaa8399ed23c76",
"size": "10662",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/akjava/gwt/uvexport/client/GWTUVExport.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "602"
},
{
"name": "HTML",
"bytes": "2367"
},
{
"name": "Java",
"bytes": "10662"
}
],
"symlink_target": ""
} |
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
tools:context="com.frrahat.cricscorer.PalyerNameInputActivity" >
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:showAsAction="never"
android:title="@string/action_settings"/>
</menu>
| {
"content_hash": "3ad03838af74874f6a247eade67821d6",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 68,
"avg_line_length": 33.81818181818182,
"alnum_prop": 0.6854838709677419,
"repo_name": "frrahat/CricScorer",
"id": "3816cf492fa1f1ee7ed78e7273a903ee72a88579",
"size": "372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "res/menu/palyer_name_input.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "35769"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
| {
"content_hash": "1aa2a3fd6825b4be803455e2e4ea1781",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "6a65a2c4a45643c7ef2fe10161cc7b5d65c9ff39",
"size": "188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Rosales/Rosaceae/Alchemilla/Alchemilla pinnata/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package Google::Ads::GoogleAds::V12::Services::AdGroupLabelService::MutateAdGroupLabelsResponse;
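# Response message for AdGroupLabelService.MutateAdGroupLabels, carrying the
# per-operation results plus any partial failure error returned by the API.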
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
sub new {
my ($class, $args) = @_;
my $self = {
partialFailureError => $args->{partialFailureError},
results => $args->{results}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
1;
| {
"content_hash": "7fe2389e6ac8f9c7f00fa0ad50b100ea",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 96,
"avg_line_length": 25.045454545454547,
"alnum_prop": 0.691470054446461,
"repo_name": "googleads/google-ads-perl",
"id": "6afc0fa5154795ae2291e869d4a91a26cbf90a1f",
"size": "1127",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lib/Google/Ads/GoogleAds/V12/Services/AdGroupLabelService/MutateAdGroupLabelsResponse.pm",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "73"
},
{
"name": "Perl",
"bytes": "5866064"
}
],
"symlink_target": ""
} |
#pragma once
#include <cstdint>
#include <string>
#include "envoy/http/codec.h"
#include "common/buffer/buffer_impl.h"
#include "common/common/logger.h"
#include "nghttp2/nghttp2.h"
namespace Envoy {
namespace Http {
namespace Http2 {
class MetadataEncoderDecoderTest_VerifyEncoderDecoderOnMultipleMetadataMaps_Test;
// A class that decodes METADATA payload in the format of HTTP/2 header block into MetadataMap, a
// map of string key value pairs.
class MetadataDecoder : Logger::Loggable<Logger::Id::http2> {
public:
/**
* @param cb is the decoder's callback function. The callback function is called when the decoder
* finishes decoding metadata.
*/
MetadataDecoder(MetadataCallback cb);
/**
   * Call this function when a METADATA frame payload is received. The payload doesn't need to be
* complete.
* @param data is the pointer to the start of the payload.
* @param len is the size of the received payload.
* @return whether Metadata is received successfully.
*/
bool receiveMetadata(const uint8_t* data, size_t len);
/**
   * Call this when a complete METADATA frame has been received. The function decodes the METADATA received.
* If the frame is the last one in the group, the function triggers the registered callback
* function callback_.
* @param end_metadata indicates if all the METADATA has been received.
* @return whether the operation succeeds.
*/
bool onMetadataFrameComplete(bool end_metadata);
/**
* @return payload_.
*/
Buffer::OwnedImpl& payload() { return payload_; }
MetadataMap& getMetadataMap() { return *metadata_map_; }
private:
friend class MetadataEncoderDecoderTest_VerifyEncoderDecoderOnMultipleMetadataMaps_Test;
friend class MetadataEncoderDecoderTest_VerifyEncoderDecoderMultipleMetadataReachSizeLimit_Test;
/**
* Decodes METADATA payload using nghttp2.
   * @param end_metadata indicates if END_METADATA is true.
   * @return whether decoding succeeds.
*/
bool decodeMetadataPayloadUsingNghttp2(bool end_metadata);
// Metadata that is currently being decoded.
MetadataMapPtr metadata_map_;
// Metadata event callback function.
MetadataCallback callback_;
// Payload received.
Buffer::OwnedImpl payload_;
// Payload size limit. If the total payload received exceeds the limit, fails the connection.
const uint64_t max_payload_size_bound_ = 1024 * 1024;
uint64_t total_payload_size_ = 0;
// TODO(soya3129): consider sharing the inflater with all streams in a connection. Caveat:
// inflater failure on one stream can impact other streams.
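  // CSmartPtr frees the inflater with nghttp2_hd_inflate_del on destruction.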
typedef CSmartPtr<nghttp2_hd_inflater, nghttp2_hd_inflate_del> Inflater;
Inflater inflater_;
};
} // namespace Http2
} // namespace Http
} // namespace Envoy
| {
"content_hash": "8204ab6ee86ca219be3b0d69f21b8707",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 99,
"avg_line_length": 31.837209302325583,
"alnum_prop": 0.7432432432432432,
"repo_name": "dnoe/envoy",
"id": "2cac52559ced7560da88c91368f149a9c5f03497",
"size": "2738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/common/http/http2/metadata_decoder.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9173"
},
{
"name": "C++",
"bytes": "12933900"
},
{
"name": "Dockerfile",
"bytes": "245"
},
{
"name": "Emacs Lisp",
"bytes": "966"
},
{
"name": "Go",
"bytes": "836"
},
{
"name": "PowerShell",
"bytes": "4285"
},
{
"name": "PureBasic",
"bytes": "472"
},
{
"name": "Python",
"bytes": "946275"
},
{
"name": "Shell",
"bytes": "98909"
},
{
"name": "Thrift",
"bytes": "748"
}
],
"symlink_target": ""
} |
namespace Microsoft.Azure.Management.KeyVault.Models
{
using Microsoft.Rest;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// Resource information with extended details.
/// </summary>
public partial class Secret : Resource
{
/// <summary>
/// Initializes a new instance of the Secret class.
/// </summary>
public Secret()
{
CustomInit();
}
/// <summary>
/// Initializes a new instance of the Secret class.
/// </summary>
/// <param name="properties">Properties of the secret</param>
/// <param name="id">Fully qualified identifier of the key vault
/// resource.</param>
/// <param name="name">Name of the key vault resource.</param>
/// <param name="type">Resource type of the key vault resource.</param>
/// <param name="location">Azure location of the key vault
/// resource.</param>
/// <param name="tags">Tags assigned to the key vault resource.</param>
public Secret(SecretProperties properties, string id = default(string), string name = default(string), string type = default(string), string location = default(string), IDictionary<string, string> tags = default(IDictionary<string, string>))
: base(id, name, type, location, tags)
{
Properties = properties;
CustomInit();
}
/// <summary>
/// An initialization method that performs custom operations like setting defaults
/// </summary>
partial void CustomInit();
/// <summary>
/// Gets or sets properties of the secret
/// </summary>
[JsonProperty(PropertyName = "properties")]
public SecretProperties Properties { get; set; }
/// <summary>
/// Validate the object.
/// </summary>
/// <exception cref="ValidationException">
/// Thrown if validation fails
/// </exception>
public virtual void Validate()
{
if (Properties == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "Properties");
}
}
}
}
| {
"content_hash": "75c5dd03cae9845c25d7ff5e371aa655",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 249,
"avg_line_length": 35.33846153846154,
"alnum_prop": 0.5811928602525033,
"repo_name": "Azure/azure-sdk-for-net",
"id": "18608bc439a869f830c5df0c250f7a905265ef5f",
"size": "2650",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/keyvault/Microsoft.Azure.Management.KeyVault/src/Generated/Models/Secret.cs",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
#region Using directives

using System;
using System.Reflection;
using System.Runtime.InteropServices;

#endregion
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Verse.CLI")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Verse.CLI")]
[assembly: AssemblyCopyright("Copyright 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// This sets the default COM visibility of types in the assembly to invisible.
// If you need to expose a type to COM, use [ComVisible(true)] on that type.
[assembly: ComVisible(false)]
// The assembly version has the following format:
//
// Major.Minor.Build.Revision
//
// You can specify all the values, or you can default the Revision and
// Build numbers by using the '*' as shown below:
[assembly: AssemblyVersion("1.0.*")]
| {
"content_hash": "f6efb4d9a537642d2ab3f8e8d3f4fc15",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 78,
"avg_line_length": 31.757575757575758,
"alnum_prop": 0.7318702290076335,
"repo_name": "criteo/verse",
"id": "6d3c3c1cdd85862c7372c90fc853b11637fecbbb",
"size": "1074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Verse.Bench/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "229843"
}
],
"symlink_target": ""
} |
package org.kie.workbench.common.stunner.core.client.session.command.impl;
import org.kie.workbench.common.stunner.core.client.session.command.AbstractClientSessionCommandExecutedEvent;
import org.kie.workbench.common.stunner.core.client.session.impl.EditorSession;
public class CopySelectionSessionCommandExecutedEvent extends AbstractClientSessionCommandExecutedEvent<CopySelectionSessionCommand, EditorSession> {
public CopySelectionSessionCommandExecutedEvent(final CopySelectionSessionCommand executedCommand,
final EditorSession clientFullSession) {
super(executedCommand,
clientFullSession);
}
}
| {
"content_hash": "bdf18cc8ae928aab3d8a88b732bbedc3",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 149,
"avg_line_length": 46,
"alnum_prop": 0.7739130434782608,
"repo_name": "romartin/kie-wb-common",
"id": "6909691e8d4b3e1385791a3b1b03a783514bcbe7",
"size": "1309",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-commons/kie-wb-common-stunner-client-common/src/main/java/org/kie/workbench/common/stunner/core/client/session/command/impl/CopySelectionSessionCommandExecutedEvent.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2591"
},
{
"name": "CSS",
"bytes": "171885"
},
{
"name": "Dockerfile",
"bytes": "210"
},
{
"name": "FreeMarker",
"bytes": "38625"
},
{
"name": "GAP",
"bytes": "86275"
},
{
"name": "HTML",
"bytes": "448966"
},
{
"name": "Java",
"bytes": "51118150"
},
{
"name": "JavaScript",
"bytes": "34587"
},
{
"name": "Shell",
"bytes": "905"
},
{
"name": "TypeScript",
"bytes": "26851"
},
{
"name": "VBA",
"bytes": "86549"
},
{
"name": "XSLT",
"bytes": "2327"
}
],
"symlink_target": ""
} |
package org.apache.felix.dm.index.itest.tests;
//import static org.ops4j.pax.exam.CoreOptions.waitForFrameworkStartupFor;
//import static org.ops4j.pax.exam.container.def.PaxRunnerOptions.vmOption;
import java.util.function.Consumer;
import org.apache.felix.dm.Component;
import org.apache.felix.dm.DependencyManager;
import org.apache.felix.dm.FilterIndex;
import org.apache.felix.dm.index.itest.dynamiccustomindex.DynamicCustomFilterIndex;
import org.junit.Assert;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceRegistration;
/**
* @author <a href="mailto:[email protected]">Felix Project Team</a>
*/
public class DynamicCustomIndexTest extends TestBase {
/**
* This system property is set to true when the DynamicCustomFilterIndex index has been opened.
*/
private final static String OPENED = "org.apache.felix.dm.index.itest.dynamiccustomindex.CustomFilterIndex.opened";
private ServiceRegistration m_reg;
private String m_systemConf;
@SuppressWarnings("unchecked")
public void setUp() throws Exception {
System.setProperty(OPENED, "false");
// backup currently configured filter index
BundleContext context = FrameworkUtil.getBundle(this.getClass()).getBundleContext();
m_systemConf = context.getProperty(DependencyManager.SERVICEREGISTRY_CACHE_INDICES);
// Reset filter indices (we must initialize DependencyManager, so its static initializer will register
// the reset backdoor.
@SuppressWarnings("unused")
DependencyManager dm = new DependencyManager(context);
Consumer<String> reset = (Consumer<String>) System.getProperties().get("org.apache.felix.dependencymanager.filterindex.reset");
reset.accept(null); // clear filter index
// register our DynamicCustomFilterIndex service before calling super.setUp(). This will make
// the "getDM()" method return a DependencyManager that is using our DynamicCustomFilterIndex
m_reg = context.registerService(FilterIndex.class.getName(), new DynamicCustomFilterIndex("objectClass"), null);
super.setUp();
}
@SuppressWarnings("unchecked")
public void tearDown() throws Exception {
super.tearDown();
try {
m_reg.unregister();
} catch (IllegalStateException e) { // expected, normally we have already unregistered it
}
System.getProperties().remove(OPENED);
Consumer<String> reset = (Consumer<String>) System.getProperties().get("org.apache.felix.dependencymanager.filterindex.reset");
reset.accept(m_systemConf);
}
public void testUsingDynamicCustomIndex() throws Exception {
doTestUsingDynamicCustomIndex();
        // Make sure our dynamic custom index has been used
Assert.assertTrue(Boolean.getBoolean(OPENED));
// unregister our dynamic filter index
m_reg.unregister();
// clear the flag
System.setProperty(OPENED, "false");
// redo the test
doTestUsingDynamicCustomIndex();
Assert.assertFalse(Boolean.getBoolean(OPENED));
}
private void doTestUsingDynamicCustomIndex() throws Exception {
DependencyManager m = getDM();
// helper class that ensures certain steps get executed in sequence
Ensure e = new Ensure();
// create a provider
Provider provider = new Provider();
// activate it
Component p = m.createComponent()
.setInterface(Service.class.getName(), null)
.setImplementation(provider);
Client consumer = new Client(e);
Component c = m.createComponent()
.setImplementation(consumer)
.add(m.createServiceDependency()
.setService(Service.class)
.setRequired(true)
);
m.add(p);
m.add(c);
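        // step 1 is fired from the client's start() callback once its dependency is available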
e.waitForStep(1, 5000);
m.remove(p);
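        // removing the provider deactivates the client, firing step 2 from stop()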
e.waitForStep(2, 5000);
m.remove(c);
m.clear();
}
public static class Client {
volatile Service m_service;
private final Ensure m_ensure;
public Client(Ensure e) {
m_ensure = e;
}
public void start() {
System.out.println("start");
m_ensure.step(1);
}
public void stop() {
System.out.println("stop");
m_ensure.step(2);
}
}
public static interface Service {
}
public static class Provider implements Service {
}
}
| {
"content_hash": "28c8c0ebec525913fb77abcfbb589f11",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 135,
"avg_line_length": 34.61654135338346,
"alnum_prop": 0.6602953953084274,
"repo_name": "apache/felix-dev",
"id": "e96467c61bab2e8b0bd4f4db57f09952fb6ea937",
"size": "5411",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dependencymanager/org.apache.felix.dependencymanager.index.itest/src/org/apache/felix/dm/index/itest/tests/DynamicCustomIndexTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "53237"
},
{
"name": "Groovy",
"bytes": "9231"
},
{
"name": "HTML",
"bytes": "372812"
},
{
"name": "Java",
"bytes": "28836360"
},
{
"name": "JavaScript",
"bytes": "248796"
},
{
"name": "Scala",
"bytes": "40378"
},
{
"name": "Shell",
"bytes": "12628"
},
{
"name": "XSLT",
"bytes": "151258"
}
],
"symlink_target": ""
} |
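// Release a COM interface pointer and null it out to avoid dangling references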
template<class Interface>
inline void SafeRelease(Interface *& pInterfaceToRelease)
{
if (pInterfaceToRelease != nullptr)
{
pInterfaceToRelease->Release();
pInterfaceToRelease = nullptr;
}
}
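// Delete a single heap object allocated with new and null the pointer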
template<class T>
inline void SafeDelete(T*& ptr)
{
if(ptr)
{
delete ptr;
ptr = nullptr;
}
}
// Safe delete array
template<class T>
inline void SafeDeleteArray(T*& pArray)
{
if(pArray)
{
delete[] pArray;
pArray = nullptr;
}
}
enum KinectVersionType{
KinectV1,
KinectV2,
KINECT_VERSION_TYPE_COUNT
};
#define WM_STREAMEVENT_COLOR (WM_USER + 1)
#define WM_STREAMEVENT_DEPTH (WM_USER + 2)
#pragma warning ( disable: 4996 )
#pragma warning ( disable: 4503 )
#include <memory>
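// Windows headers define min/max macros that clash with the STL and PCL; undo them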
#undef max
#undef min
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <boost/signal.hpp>
#include <boost/signals2.hpp>
#include <boost/bind.hpp>
#include <iomanip>
| {
"content_hash": "9c5c9b0413162ec0b78e9d8caade291c",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 57,
"avg_line_length": 17.40740740740741,
"alnum_prop": 0.6638297872340425,
"repo_name": "DanielMerget/FaceGrabber",
"id": "532677f6739b4018af89fe834b54f9ae7b06bf63",
"size": "2512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stdafx.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "677"
},
{
"name": "C",
"bytes": "36645"
},
{
"name": "C++",
"bytes": "623571"
},
{
"name": "CSS",
"bytes": "31119"
},
{
"name": "HLSL",
"bytes": "1977"
},
{
"name": "HTML",
"bytes": "1441527"
},
{
"name": "JavaScript",
"bytes": "43433"
},
{
"name": "Makefile",
"bytes": "508"
},
{
"name": "PostScript",
"bytes": "50575"
},
{
"name": "TeX",
"bytes": "255295"
}
],
"symlink_target": ""
} |
import Component from "./Component";
import Utils from "./Utils";
export default class Toolbar extends Component{
constructor(settings){
super();
		let me = this;
me.settings = $.extend(true,{
id: '',
position: 'top',
items:[]
},settings);
me.id = me.settings.id||Utils.id();
me.content = $(`
<div id="${me.id}" class="it-toolbar toolbar-${me.settings.position}">
<ul class="it-toolbar-left"></ul>
<ul class="it-toolbar-right"></ul>
</div>
`);
me.ids=[];
me.items={};
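		// instantiate each configured item, render it into a list element and index it by id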
$.each(me.settings.items, function(k, el) {
if(el) {
let li = $('<li/>');
if(!el.isClass)
el=Utils.createObject(el);
el.renderTo(li);
me.content.find(`.it-toolbar-${el.getSetting().align||'left'}`).append(li);
me.ids.push(el.getId());
me.items[el.getId()] = el;
}
});
}
getItemCount(){
return this.ids.length;
}
getItem(id){
if(typeof id==="number")id = this.ids[id];
if(id)return this.items[id]||null;
return this.items;
}
} | {
"content_hash": "ffabb8f1143d61c25a8202187fe31218",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 79,
"avg_line_length": 23.547619047619047,
"alnum_prop": 0.5925176946410515,
"repo_name": "thinkitstartup/it-framework",
"id": "5471be57e964ee4a5e87f703bab741f617a929d5",
"size": "989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/js/lib/ToolBar.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "91748"
},
{
"name": "HTML",
"bytes": "11558"
},
{
"name": "JavaScript",
"bytes": "62567"
}
],
"symlink_target": ""
} |
module RuboCop
module Cop
module Rails
# This cop checks for the use of the read_attribute or
# write_attribute methods.
#
# @example
#
# # bad
# x = read_attribute(:attr)
# write_attribute(:attr, val)
#
# # good
# x = self[:attr]
# self[:attr] = val
class ReadWriteAttribute < Cop
MSG = 'Prefer `%s` over `%s`.'.freeze
def on_send(node)
receiver, method_name, *_args = *node
return if receiver
return unless [:read_attribute,
:write_attribute].include?(method_name)
add_offense(node, :selector)
end
def message(node)
_receiver, method_name, *_args = *node
if method_name == :read_attribute
format(MSG, 'self[:attr]', 'read_attribute(:attr)')
else
format(MSG, 'self[:attr] = val', 'write_attribute(:attr, val)')
end
end
def autocorrect(node)
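          # build the self[...] replacement and hand back a lambda for the corrector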
_receiver, method_name, _body = *node
case method_name
when :read_attribute
replacement = read_attribute_replacement(node)
when :write_attribute
replacement = write_attribute_replacement(node)
end
->(corrector) { corrector.replace(node.source_range, replacement) }
end
private
def read_attribute_replacement(node)
_receiver, _method_name, body = *node
"self[#{body.source}]"
end
def write_attribute_replacement(node)
_receiver, _method_name, *args = *node
name, value = *args
"self[#{name.source}] = #{value.source}"
end
end
end
end
end
| {
"content_hash": "e93096e214f5ee20453f735868256424",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 77,
"avg_line_length": 25.852941176470587,
"alnum_prop": 0.5250284414106939,
"repo_name": "dreyks/rubocop",
"id": "aad8c2f04265fa0b0242a1b7f8538e88fd9f7dea",
"size": "1807",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "lib/rubocop/cop/rails/read_write_attribute.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "355"
},
{
"name": "HTML",
"bytes": "7106"
},
{
"name": "Ruby",
"bytes": "3248681"
}
],
"symlink_target": ""
} |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.assistants;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbModelingAssistantProvider;
/**
* @generated
*/
public class EsbModelingAssistantProviderOfEntitlementObligationsContainerEditPart
extends EsbModelingAssistantProvider {
/**
* @generated
*/
@Override
public List<IElementType> getTypesForPopupBar(IAdaptable host) {
List<IElementType> types = new ArrayList<IElementType>(1);
types.add(EsbElementTypes.MediatorFlow_3760);
return types;
}
}
| {
"content_hash": "77fbf21732753f0b7cdbaee439efb8f7",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 95,
"avg_line_length": 31,
"alnum_prop": 0.7694145758661888,
"repo_name": "prabushi/devstudio-tooling-esb",
"id": "705185209906611ab04cf6d16578786ec4cc53f3",
"size": "837",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/providers/assistants/EsbModelingAssistantProviderOfEntitlementObligationsContainerEditPart.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "41098"
},
{
"name": "HTML",
"bytes": "731356"
},
{
"name": "Java",
"bytes": "77332976"
},
{
"name": "JavaScript",
"bytes": "475592"
},
{
"name": "Shell",
"bytes": "7727"
}
],
"symlink_target": ""
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.oasisdigital</groupId>
<artifactId>jira-rcarz-example</artifactId>
<version>0.0.1-SNAPSHOT</version>
<dependencies>
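    <!-- rcarz's jira-client, the JIRA REST client this example exercises -->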
<dependency>
<groupId>net.rcarz</groupId>
<artifactId>jira-client</artifactId>
<version>0.3</version>
</dependency>
</dependencies>
<properties>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
</project> | {
"content_hash": "0fec5761597680c165dfc8caac3f5625",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 104,
"avg_line_length": 35.1578947368421,
"alnum_prop": 0.7275449101796407,
"repo_name": "kylecordes/jira-api-anon",
"id": "2a50beca8f7b00424655db964b1305c5ad528dde",
"size": "668",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pom.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1997"
}
],
"symlink_target": ""
} |
#ifndef _Controls4U_Demo_Controls4U_Demo_h_   // guard name assumed, matching the trailing #endif
#define _Controls4U_Demo_Controls4U_Demo_h_

#define LAYOUTFILE <Controls4U_Demo/Controls4U_Demo.lay>
#include <CtrlCore/lay.h>
class EditFileFolder_Demo : public WithEditFileFolder<StaticRect> {
public:
typedef EditFileFolder_Demo CLASSNAME;
EditFileFolder_Demo();
void OnNewFile();
void ChangeProperties();
};
class StaticCtrls_Demo : public WithStaticCtrls<StaticRect> {
public:
typedef StaticCtrls_Demo CLASSNAME;
StaticCtrls_Demo();
};
class StaticCtrlsTest_Demo : public WithStaticCtrlsTest<StaticRect> {
public:
typedef StaticCtrlsTest_Demo CLASSNAME;
StaticCtrlsTest_Demo();
};
class StaticClock_Demo : public WithStaticClock<StaticRect> {
public:
typedef StaticClock_Demo CLASSNAME;
StaticClock_Demo();
void UpdateInfo();
void ChangeProperties();
};
class Meter_Demo : public WithMeter<StaticRect> {
public:
typedef Meter_Demo CLASSNAME;
Meter_Demo();
void ChangeValueKnob(Knob *knob, Meter *meter);
void ChangeProperties();
void ChangePropertiesKnob();
};
class FileBrowser_Demo : public WithFileBrowser<StaticRect> {
public:
typedef FileBrowser_Demo CLASSNAME;
FileBrowser_Demo();
void FileSelected();
void FileOpened();
void ChangeProperties();
};
class Functions4U_Demo : public WithFunctions4U<StaticRect> {
public:
typedef Functions4U_Demo CLASSNAME;
Functions4U_Demo();
void OnDiff();
void OnPatch();
void OnSet();
};
class PainterCanvas_Demo : public WithPainterCanvas<StaticRect> {
public:
typedef PainterCanvas_Demo CLASSNAME;
PainterCanvas_Demo();
};
class StaticImageSet_Demo : public WithStaticImageSet<StaticRect> {
public:
typedef StaticImageSet_Demo CLASSNAME;
StaticImageSet_Demo();
};
class Controls4U_Demo : public WithMain<TopWindow> {
public:
typedef Controls4U_Demo CLASSNAME;
Controls4U_Demo();
EditFileFolder_Demo editFileFolder_Demo;
StaticCtrls_Demo staticCtrls_Demo;
StaticCtrlsTest_Demo staticCtrlsTest_Demo;
StaticClock_Demo staticClock_Demo;
Meter_Demo meter_Demo;
JBControlsDemo jbcontrols_Demo;
FileBrowser_Demo fileBrowser_Demo;
Functions4U_Demo functions4U_Demo;
PainterCanvas_Demo painterCanvas_Demo;
StaticImageSet_Demo staticImageSet_Demo;
#if defined(PLATFORM_WIN32)
Firefox_Demo firefox_Demo;
IExplorer_Demo iexplorer_Demo;
VLC_Demo vlc_Demo;
#endif
int timerOn;
void Timer();
};
#endif
| {
"content_hash": "14d78eec71352be7080f0f60a70262e4",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 69,
"avg_line_length": 24.03061224489796,
"alnum_prop": 0.7473460721868365,
"repo_name": "dreamsxin/ultimatepp",
"id": "80a888776fc402e54b97067b28e8a2566f685357",
"size": "2443",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bazaar/Controls4U_Demo/Controls4U_Demo.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "8477"
},
{
"name": "C",
"bytes": "47921993"
},
{
"name": "C++",
"bytes": "28354499"
},
{
"name": "CSS",
"bytes": "659"
},
{
"name": "JavaScript",
"bytes": "7006"
},
{
"name": "Objective-C",
"bytes": "178854"
},
{
"name": "Perl",
"bytes": "65041"
},
{
"name": "Python",
"bytes": "38142"
},
{
"name": "Shell",
"bytes": "91097"
},
{
"name": "Smalltalk",
"bytes": "101"
},
{
"name": "Turing",
"bytes": "661569"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
NUB Generator [autonym]
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "ac6093eb4f84286dbacbd1ca65fc241b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 23,
"avg_line_length": 9.076923076923077,
"alnum_prop": 0.6779661016949152,
"repo_name": "mdoering/backbone",
"id": "6b8e6ad9293940125d1dede6212a9c7ab96cd657",
"size": "166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Ericaceae/Erica/Erica glauca/Erica glauca glauca/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.github.ppodgorsek.juncacher.varnish.strategy.impl;
import com.github.ppodgorsek.juncacher.model.InvalidationEntry;
import com.github.ppodgorsek.juncacher.strategy.InvalidationStrategy;
import com.github.ppodgorsek.juncacher.varnish.strategy.AbstractVarnishUrlStrategy;
/**
* URL strategy that does no transformation to URLs.
*
* @since 1.0
* @author Paul Podgorsek
*/
public class SimpleVarnishUrlStrategy extends AbstractVarnishUrlStrategy<InvalidationEntry>
implements InvalidationStrategy<InvalidationEntry> {
@Override
public boolean canHandle(final InvalidationEntry entry) {
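		// this catch-all strategy accepts every type of invalidation entry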
return true;
}
@Override
protected String getUpdatedUrl(final InvalidationEntry entry, final String url) {
return url;
}
}
| {
"content_hash": "acce066a194cad78eb2e138eead4d8ce",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 91,
"avg_line_length": 28.384615384615383,
"alnum_prop": 0.8116531165311653,
"repo_name": "ppodgorsek/cache-invalidation",
"id": "f9e4973a11ffda7226d77488f742710a29ff81bb",
"size": "738",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "juncacher-varnish/src/main/java/com/github/ppodgorsek/juncacher/varnish/strategy/impl/SimpleVarnishUrlStrategy.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "53744"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<style>
#container {
overflow: hidden;
will-change: transform;
width: 400px;
height: 400px;
background-attachment: local;
background-color: blue;
}
#child {
height: 500px;
}
</style>
<div id="container">
<div id="child"></div>
</div>
<script src="../resources/text-based-repaint.js"></script>
<script>
function repaintTest() {
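  // Grow the scrolling contents, then scroll: with background-attachment:local
  // the background must repaint to cover the new scroll extent.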
child.style.height = '2000px';
container.scrollTop = 2000;
}
onload = runRepaintAndPixelTest;
</script>
| {
"content_hash": "5c02e27d8f427a8d64a6915f188478ab",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 58,
"avg_line_length": 17.37037037037037,
"alnum_prop": 0.6801705756929638,
"repo_name": "google-ar/WebARonARCore",
"id": "ea1db4bc1d46da5636f4bbcc0b70822300f7027d",
"size": "469",
"binary": false,
"copies": "3",
"ref": "refs/heads/webarcore_57.0.2987.5",
"path": "third_party/WebKit/LayoutTests/paint/invalidation/compositing/background-attachment-local-equivalent-expected.html",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/global/inter-level-propagation-policies/level1-to-level2/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to the propagation
of prefixes from IS-IS Level 1 to Level 2.
"""
__slots__ = (
"_path_helper", "_extmethods", "__import_policy", "__default_import_policy"
)
_yang_name = "config"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__import_policy = YANGDynClass(
base=TypedListType(allowed_type=six.text_type),
is_leaf=False,
yang_name="import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__default_import_policy = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ACCEPT_ROUTE": {}, "REJECT_ROUTE": {}},
),
default=six.text_type("REJECT_ROUTE"),
is_leaf=True,
yang_name="default-import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="default-policy-type",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"global",
"inter-level-propagation-policies",
"level1-to-level2",
"config",
]
def _get_import_policy(self):
"""
Getter method for import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/import_policy (leafref)
YANG Description: list of policy names in sequence to be applied on
receiving a routing update in the current context, e.g.,
for the current peer group, neighbor, address family,
etc.
"""
return self.__import_policy
def _set_import_policy(self, v, load=False):
"""
Setter method for import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/import_policy (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_import_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_import_policy() directly.
YANG Description: list of policy names in sequence to be applied on
receiving a routing update in the current context, e.g.,
for the current peer group, neighbor, address family,
etc.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(allowed_type=six.text_type),
is_leaf=False,
yang_name="import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """import_policy must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=six.text_type), is_leaf=False, yang_name="import-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__import_policy = t
if hasattr(self, "_set"):
self._set()
def _unset_import_policy(self):
self.__import_policy = YANGDynClass(
base=TypedListType(allowed_type=six.text_type),
is_leaf=False,
yang_name="import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_default_import_policy(self):
"""
Getter method for default_import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/default_import_policy (default-policy-type)
YANG Description: explicitly set a default policy if no policy definition
in the import policy chain is satisfied.
"""
return self.__default_import_policy
def _set_default_import_policy(self, v, load=False):
"""
Setter method for default_import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/default_import_policy (default-policy-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_import_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_import_policy() directly.
YANG Description: explicitly set a default policy if no policy definition
in the import policy chain is satisfied.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ACCEPT_ROUTE": {}, "REJECT_ROUTE": {}},
),
default=six.text_type("REJECT_ROUTE"),
is_leaf=True,
yang_name="default-import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="default-policy-type",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """default_import_policy must be of a type compatible with default-policy-type""",
"defined-type": "openconfig-network-instance:default-policy-type",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCEPT_ROUTE': {}, 'REJECT_ROUTE': {}},), default=six.text_type("REJECT_ROUTE"), is_leaf=True, yang_name="default-import-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='default-policy-type', is_config=True)""",
}
)
self.__default_import_policy = t
if hasattr(self, "_set"):
self._set()
def _unset_default_import_policy(self):
self.__default_import_policy = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ACCEPT_ROUTE": {}, "REJECT_ROUTE": {}},
),
default=six.text_type("REJECT_ROUTE"),
is_leaf=True,
yang_name="default-import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="default-policy-type",
is_config=True,
)
import_policy = __builtin__.property(_get_import_policy, _set_import_policy)
default_import_policy = __builtin__.property(
_get_default_import_policy, _set_default_import_policy
)
_pyangbind_elements = OrderedDict(
[
("import_policy", import_policy),
("default_import_policy", default_import_policy),
]
)
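# NOTE: an identically-named class follows, generated for the
# openconfig-network-instance-l2 module; being defined later, it shadows the
# class above when this module is imported.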
class config(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/global/inter-level-propagation-policies/level1-to-level2/config. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Configuration parameters relating to the propagation
of prefixes from IS-IS Level 1 to Level 2.
"""
__slots__ = (
"_path_helper", "_extmethods", "__import_policy", "__default_import_policy"
)
_yang_name = "config"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__import_policy = YANGDynClass(
base=TypedListType(allowed_type=six.text_type),
is_leaf=False,
yang_name="import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
self.__default_import_policy = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ACCEPT_ROUTE": {}, "REJECT_ROUTE": {}},
),
default=six.text_type("REJECT_ROUTE"),
is_leaf=True,
yang_name="default-import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="default-policy-type",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"global",
"inter-level-propagation-policies",
"level1-to-level2",
"config",
]
def _get_import_policy(self):
"""
Getter method for import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/import_policy (leafref)
YANG Description: list of policy names in sequence to be applied on
receiving a routing update in the current context, e.g.,
for the current peer group, neighbor, address family,
etc.
"""
return self.__import_policy
def _set_import_policy(self, v, load=False):
"""
Setter method for import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/import_policy (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_import_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_import_policy() directly.
YANG Description: list of policy names in sequence to be applied on
receiving a routing update in the current context, e.g.,
for the current peer group, neighbor, address family,
etc.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(allowed_type=six.text_type),
is_leaf=False,
yang_name="import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """import_policy must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=six.text_type), is_leaf=False, yang_name="import-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=True)""",
}
)
self.__import_policy = t
if hasattr(self, "_set"):
self._set()
def _unset_import_policy(self):
self.__import_policy = YANGDynClass(
base=TypedListType(allowed_type=six.text_type),
is_leaf=False,
yang_name="import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="leafref",
is_config=True,
)
def _get_default_import_policy(self):
"""
Getter method for default_import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/default_import_policy (default-policy-type)
YANG Description: explicitly set a default policy if no policy definition
in the import policy chain is satisfied.
"""
return self.__default_import_policy
def _set_default_import_policy(self, v, load=False):
"""
Setter method for default_import_policy, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/inter_level_propagation_policies/level1_to_level2/config/default_import_policy (default-policy-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_import_policy is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_import_policy() directly.
YANG Description: explicitly set a default policy if no policy definition
in the import policy chain is satisfied.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ACCEPT_ROUTE": {}, "REJECT_ROUTE": {}},
),
default=six.text_type("REJECT_ROUTE"),
is_leaf=True,
yang_name="default-import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="default-policy-type",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """default_import_policy must be of a type compatible with default-policy-type""",
"defined-type": "openconfig-network-instance:default-policy-type",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'ACCEPT_ROUTE': {}, 'REJECT_ROUTE': {}},), default=six.text_type("REJECT_ROUTE"), is_leaf=True, yang_name="default-import-policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='default-policy-type', is_config=True)""",
}
)
self.__default_import_policy = t
if hasattr(self, "_set"):
self._set()
def _unset_default_import_policy(self):
self.__default_import_policy = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"ACCEPT_ROUTE": {}, "REJECT_ROUTE": {}},
),
default=six.text_type("REJECT_ROUTE"),
is_leaf=True,
yang_name="default-import-policy",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="default-policy-type",
is_config=True,
)
import_policy = __builtin__.property(_get_import_policy, _set_import_policy)
default_import_policy = __builtin__.property(
_get_default_import_policy, _set_default_import_policy
)
_pyangbind_elements = OrderedDict(
[
("import_policy", import_policy),
("default_import_policy", default_import_policy),
]
)
| {
"content_hash": "2f00321e283a0d9c3c82353d04ee2de6",
"timestamp": "",
"source": "github",
"line_count": 519,
"max_line_length": 603,
"avg_line_length": 42.86897880539499,
"alnum_prop": 0.5935547665063599,
"repo_name": "napalm-automation/napalm-yang",
"id": "ea9b6405ab2269e679a8da87d9b69958e7dcaa91",
"size": "22273",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/global_/inter_level_propagation_policies/level1_to_level2/config/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "370237"
},
{
"name": "Jupyter Notebook",
"bytes": "152135"
},
{
"name": "Makefile",
"bytes": "1965"
},
{
"name": "Python",
"bytes": "105688785"
},
{
"name": "Roff",
"bytes": "1632"
}
],
"symlink_target": ""
} |
package com.hanuor.pearl_demonstration;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import com.hanuor.pearl.Pearl;
import java.util.ArrayList;
/**
* Created by Shantanu Johri on 03-08-2016.
*/
public class ClassicAdapter extends BaseAdapter{
private Context mContext;
private final ArrayList<String> Imageid;
public ClassicAdapter(Context c,ArrayList<String> Imageid ) {
mContext = c;
this.Imageid = Imageid;
}
@Override
public int getCount() {
        return Imageid.size();
}
@Override
public Object getItem(int position) {
        return Imageid.get(position);
}
@Override
public long getItemId(int position) {
        return position;
}
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        View grid;
        if (convertView == null) {
            LayoutInflater inflater = (LayoutInflater) mContext
                    .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            grid = inflater.inflate(R.layout.single, parent, false);
        } else {
            grid = convertView;
        }
        // Bind the image on every call, not only when a view is first inflated,
        // so recycled views are refreshed for their new position.
        ImageView thumb = (ImageView) grid.findViewById(R.id.grid_image);
        thumb.setTag(position);
        Pearl.imageLoader(mContext, Imageid.get(position), thumb, R.drawable.more);
        return grid;
    }
}
| {
"content_hash": "7d1c193c552dcb2b853d767472a46245",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 84,
"avg_line_length": 25.487179487179485,
"alnum_prop": 0.6161971830985915,
"repo_name": "hanuor/pearl",
"id": "3df650802fe8495c9bc6f344615608ffd9bc9185",
"size": "1988",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/hanuor/pearl_demonstration/ClassicAdapter.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "394284"
}
],
"symlink_target": ""
} |
'use strict';
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") return Reflect.decorate(decorators, target, key, desc);
switch (arguments.length) {
case 2: return decorators.reduceRight(function(o, d) { return (d && d(o)) || o; }, target);
case 3: return decorators.reduceRight(function(o, d) { return (d && d(target, key)), void 0; }, void 0);
case 4: return decorators.reduceRight(function(o, d) { return (d && d(target, key, o)) || o; }, desc);
}
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var di_1 = require('angular2/di');
var collection_1 = require('angular2/src/core/facade/collection');
var api_1 = require('angular2/src/core/render/api');
var serializer_1 = require('angular2/src/web_workers/shared/serializer');
var messaging_api_1 = require('angular2/src/web_workers/shared/messaging_api');
var message_bus_1 = require('angular2/src/web_workers/shared/message_bus');
var async_1 = require('angular2/src/core/facade/async');
var event_deserializer_1 = require('./event_deserializer');
var ng_zone_1 = require('angular2/src/core/zone/ng_zone');
var WebWorkerEventDispatcher = (function () {
function WebWorkerEventDispatcher(bus, _serializer, _zone) {
var _this = this;
this._serializer = _serializer;
this._zone = _zone;
this._eventDispatchRegistry = new collection_1.Map();
var source = bus.from(messaging_api_1.EVENT_CHANNEL);
async_1.ObservableWrapper.subscribe(source, function (message) { return _this._dispatchEvent(new RenderEventData(message, _serializer)); });
}
WebWorkerEventDispatcher.prototype._dispatchEvent = function (eventData) {
var dispatcher = this._eventDispatchRegistry.get(eventData.viewRef);
this._zone.run(function () {
eventData.locals['$event'] = event_deserializer_1.deserializeGenericEvent(eventData.locals['$event']);
dispatcher.dispatchRenderEvent(eventData.elementIndex, eventData.eventName, eventData.locals);
});
};
WebWorkerEventDispatcher.prototype.registerEventDispatcher = function (viewRef, dispatcher) {
this._eventDispatchRegistry.set(viewRef, dispatcher);
};
WebWorkerEventDispatcher = __decorate([
di_1.Injectable(),
__metadata('design:paramtypes', [message_bus_1.MessageBus, serializer_1.Serializer, ng_zone_1.NgZone])
], WebWorkerEventDispatcher);
return WebWorkerEventDispatcher;
})();
exports.WebWorkerEventDispatcher = WebWorkerEventDispatcher;
var RenderEventData = (function () {
function RenderEventData(message, serializer) {
this.viewRef = serializer.deserialize(message['viewRef'], api_1.RenderViewRef);
this.elementIndex = message['elementIndex'];
this.eventName = message['eventName'];
this.locals = collection_1.MapWrapper.createFromStringMap(message['locals']);
}
return RenderEventData;
})();
//# sourceMappingURL=event_dispatcher.js.map | {
"content_hash": "01869557f0ab918ade13ddd8f1d0fee9",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 148,
"avg_line_length": 58,
"alnum_prop": 0.6785714285714286,
"repo_name": "rehnen/potato",
"id": "3d13311eecb16ad4e70d897e8ab3d8e2990bec9c",
"size": "3248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "node_modules/angular2/src/web_workers/worker/event_dispatcher.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "575"
},
{
"name": "JavaScript",
"bytes": "3314"
},
{
"name": "TypeScript",
"bytes": "1112"
}
],
"symlink_target": ""
} |
========
Usage
========
To use linkpile in a project::
import linkpile | {
"content_hash": "b591eb12b3714e36d971c0192ed62778",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 30,
"avg_line_length": 10.857142857142858,
"alnum_prop": 0.5526315789473685,
"repo_name": "gjost/django-linkpile",
"id": "6b133ff50a69a41e4269365800d97eaa73f8eaa6",
"size": "76",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/usage.rst",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "4501"
},
{
"name": "Makefile",
"bytes": "1225"
},
{
"name": "Python",
"bytes": "20086"
}
],
"symlink_target": ""
} |
'use strict';
module.exports = require.requireActual('restoreRelayCacheData');
| {
"content_hash": "b6f114d3cedd203bf2bca538b2b2b9f3",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 64,
"avg_line_length": 16.4,
"alnum_prop": 0.7682926829268293,
"repo_name": "mroch/relay",
"id": "b48a4bf2d07c759cc73ca5043702e72a9ad58c3e",
"size": "390",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/store/__mocks__/restoreRelayCacheData.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "22237"
},
{
"name": "HTML",
"bytes": "308"
},
{
"name": "JavaScript",
"bytes": "2250030"
},
{
"name": "Shell",
"bytes": "396"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Frameset//EN" "http://www.w3.org/TR/html4/frameset.dtd">
<!-- NewPage -->
<html lang="pl">
<head>
<!-- Generated by javadoc -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Play! 2.x Provider for Play! 2.7.x 1.0.0-rc5 API</title>
<script type="text/javascript">
tmpTargetPage = "" + window.location.search;
if (tmpTargetPage != "" && tmpTargetPage != "undefined")
tmpTargetPage = tmpTargetPage.substring(1);
if (tmpTargetPage.indexOf(":") != -1 || (tmpTargetPage != "" && !validURL(tmpTargetPage)))
tmpTargetPage = "undefined";
targetPage = tmpTargetPage;
function validURL(url) {
try {
url = decodeURIComponent(url);
}
catch (error) {
return false;
}
var pos = url.indexOf(".html");
if (pos == -1 || pos != url.length - 5)
return false;
var allowNumber = false;
var allowSep = false;
var seenDot = false;
for (var i = 0; i < url.length - 5; i++) {
var ch = url.charAt(i);
if ('a' <= ch && ch <= 'z' ||
'A' <= ch && ch <= 'Z' ||
ch == '$' ||
ch == '_' ||
ch.charCodeAt(0) > 127) {
allowNumber = true;
allowSep = true;
} else if ('0' <= ch && ch <= '9'
|| ch == '-') {
if (!allowNumber)
return false;
} else if (ch == '/' || ch == '.') {
if (!allowSep)
return false;
allowNumber = false;
allowSep = false;
if (ch == '.')
seenDot = true;
if (ch == '/' && seenDot)
return false;
} else {
return false;
}
}
return true;
}
function loadFrames() {
if (targetPage != "" && targetPage != "undefined")
top.classFrame.location = top.targetPage;
}
</script>
</head>
<frameset cols="20%,80%" title="Documentation frame" onload="top.loadFrames()">
<frameset rows="30%,70%" title="Left frames" onload="top.loadFrames()">
<frame src="overview-frame.html" name="packageListFrame" title="All Packages">
<frame src="allclasses-frame.html" name="packageFrame" title="All classes and interfaces (except non-static nested types)">
</frameset>
<frame src="overview-summary.html" name="classFrame" title="Package, class and interface descriptions" scrolling="yes">
<noframes>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<h2>Frame Alert</h2>
<p>This document is designed to be viewed using the frames feature. If you see this message, you are using a non-frame-capable web client. Link to <a href="overview-summary.html">Non-frame version</a>.</p>
</noframes>
</frameset>
</html>
| {
"content_hash": "572b8ab6d70da76e75739fe0b20c5622",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 205,
"avg_line_length": 38.96052631578947,
"alnum_prop": 0.5305639986491051,
"repo_name": "play2-maven-plugin/play2-maven-plugin.github.io",
"id": "818296986a1ccbf832553d75c3ee0ecba10d9f40",
"size": "2961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "play2-maven-plugin/1.0.0-rc5/play2-providers/play2-provider-play27/apidocs/index.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2793124"
},
{
"name": "HTML",
"bytes": "178221432"
},
{
"name": "JavaScript",
"bytes": "120742"
}
],
"symlink_target": ""
} |
from collections import abc
import itertools
import re
import sys
from neutron_lib.api import attributes
from neutron_lib.api.definitions import network as net_apidef
from neutron_lib import constants
from neutron_lib import context
from neutron_lib import exceptions
from neutron_lib.plugins import directory
from neutron_lib.services import constants as service_const
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_policy import opts
from oslo_policy import policy
from oslo_utils import excutils
import stevedore
from neutron._i18n import _
from neutron.common import cache_utils as cache
LOG = logging.getLogger(__name__)
_ENFORCER = None
ADMIN_CTX_POLICY = 'context_is_admin'
ADVSVC_CTX_POLICY = 'context_is_advsvc'
# Identify the attribute used by a resource to reference another resource
_RESOURCE_FOREIGN_KEYS = {
net_apidef.COLLECTION_NAME: 'network_id',
# TODO(slaweq): use SECURITYGROUPS constant from api def when
# securitygroups api def will be moved to neutron-lib
'security_groups': 'security_group_id'
}
# TODO(gmann): Remove setting the default value of config policy_file
# once oslo_policy change the default value to 'policy.yaml'.
# https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49
DEFAULT_POLICY_FILE = 'policy.yaml'
opts.set_defaults(cfg.CONF, DEFAULT_POLICY_FILE)
def reset():
global _ENFORCER
if _ENFORCER:
_ENFORCER.clear()
_ENFORCER = None
def register_rules(enforcer):
extmgr = stevedore.extension.ExtensionManager('neutron.policies',
invoke_on_load=True)
policies = [list(e.obj) for e in extmgr.extensions]
LOG.debug('Loaded default policies from %s '
'under neutron.policies entry points',
[e.name for e in extmgr.extensions])
enforcer.register_defaults(itertools.chain(*policies))
def init(conf=cfg.CONF, policy_file=None, suppress_deprecation_warnings=False):
"""Init an instance of the Enforcer class."""
global _ENFORCER
if not _ENFORCER:
_ENFORCER = policy.Enforcer(conf, policy_file=policy_file)
        # TODO(slaweq) Explicitly disable the warnings for policies
        # changing their default check_str. During the policy-defaults-refresh
        # work all the policy defaults were changed, and the warning emitted
        # for each policy was flooding the logs of various tools.
        # Once we move to the new-defaults-only world we can enable these
        # warnings again.
_ENFORCER.suppress_default_change_warnings = True
if suppress_deprecation_warnings:
_ENFORCER.suppress_deprecation_warnings = True
register_rules(_ENFORCER)
_ENFORCER.load_rules(True)
def refresh(policy_file=None):
"""Reset policy and init a new instance of Enforcer."""
reset()
init(policy_file=policy_file)
def get_resource_and_action(action, pluralized=None):
"""Return resource and enforce_attr_based_check(boolean) per
resource and action extracted from api operation.
"""
data = action.split(':', 1)[0].split('_', 1)
resource = pluralized or ("%ss" % data[-1])
enforce_attr_based_check = data[0] not in ('get', 'delete')
return (resource, enforce_attr_based_check)
def set_rules(policies, overwrite=True):
"""Set rules based on the provided dict of rules.
:param policies: New policies to use. It should be an instance of dict.
:param overwrite: Whether to overwrite current rules or update them
with the new rules.
"""
LOG.debug("Loading policies from file: %s", _ENFORCER.policy_path)
init()
_ENFORCER.set_rules(policies, overwrite)
def _is_attribute_explicitly_set(attribute_name, resource, target, action):
"""Verify that an attribute is present and is explicitly set."""
if target.get(constants.ATTRIBUTES_TO_UPDATE):
# In the case of update, the function should not pay attention to a
# default value of an attribute, but check whether it was explicitly
# marked as being updated instead.
return (attribute_name in target[constants.ATTRIBUTES_TO_UPDATE] and
target[attribute_name] is not constants.ATTR_NOT_SPECIFIED)
result = (attribute_name in target and
target[attribute_name] is not constants.ATTR_NOT_SPECIFIED)
if result and 'default' in resource[attribute_name]:
return target[attribute_name] != resource[attribute_name]['default']
return result
def _should_validate_sub_attributes(attribute, sub_attr):
"""Verify that sub-attributes are iterable and should be validated."""
validate = attribute.get('validate')
return (validate and isinstance(sub_attr, abc.Iterable) and
any([k.startswith('type:dict') and
v for (k, v) in validate.items()]))
def _build_subattr_match_rule(attr_name, attr, action, target):
"""Create the rule to match for sub-attribute policy checks."""
# TODO(salv-orlando): Instead of relying on validator info, introduce
# typing for API attributes
# Expect a dict as type descriptor
validate = attr['validate']
key = [k for k in validate.keys() if k.startswith('type:dict')]
if not key:
LOG.warning("Unable to find data type descriptor for attribute %s",
attr_name)
return
data = validate[key[0]]
if not isinstance(data, dict):
LOG.debug("Attribute type descriptor is not a dict. Unable to "
"generate any sub-attr policy rule for %s.",
attr_name)
return
sub_attr_rules = [policy.RuleCheck('rule', '%s:%s:%s' %
(action, attr_name,
sub_attr_name)) for
sub_attr_name in data if sub_attr_name in
target[attr_name]]
return policy.AndCheck(sub_attr_rules)
def _build_list_of_subattrs_rule(attr_name, attribute_value, action):
rules = []
for sub_attr in attribute_value:
if isinstance(sub_attr, dict):
for k in sub_attr:
rules.append(policy.RuleCheck(
'rule', '%s:%s:%s' % (action, attr_name, k)))
if rules:
return policy.AndCheck(rules)
def _process_rules_list(rules, match_rule):
"""Recursively walk a policy rule to extract a list of match entries."""
if isinstance(match_rule, policy.RuleCheck):
rules.append(match_rule.match)
elif isinstance(match_rule, policy.AndCheck):
for rule in match_rule.rules:
_process_rules_list(rules, rule)
return rules
def _build_match_rule(action, target, pluralized):
"""Create the rule to match for a given action.
The policy rule to be matched is built in the following way:
1) add entries for matching permission on objects
2) add an entry for the specific action (e.g.: create_network)
3) add an entry for attributes of a resource for which the action
is being executed (e.g.: create_network:shared)
4) add an entry for sub-attributes of a resource for which the
action is being executed
(e.g.: create_router:external_gateway_info:network_id)
"""
match_rule = policy.RuleCheck('rule', action)
registered_rule = _ENFORCER.registered_rules.get(action)
if registered_rule and registered_rule.scope_types:
match_rule.scope_types = registered_rule.scope_types
resource, enforce_attr_based_check = get_resource_and_action(
action, pluralized)
if enforce_attr_based_check:
# assigning to variable with short name for improving readability
res_map = attributes.RESOURCES
if resource in res_map:
for attribute_name in res_map[resource]:
if _is_attribute_explicitly_set(attribute_name,
res_map[resource],
target, action):
attribute = res_map[resource][attribute_name]
if 'enforce_policy' in attribute:
attr_rule = policy.RuleCheck(
'rule', '%s:%s' % (action, attribute_name))
# Build match entries for sub-attributes
if _should_validate_sub_attributes(
attribute, target[attribute_name]):
attr_rule = policy.AndCheck(
[attr_rule, _build_subattr_match_rule(
attribute_name, attribute,
action, target)])
attribute_value = target[attribute_name]
if isinstance(attribute_value, list):
subattr_rule = _build_list_of_subattrs_rule(
attribute_name, attribute_value, action)
if subattr_rule:
attr_rule = policy.AndCheck(
[attr_rule, subattr_rule])
match_rule = policy.AndCheck([match_rule, attr_rule])
return match_rule
# This check is registered as 'tenant_id' so that it can override
# GenericCheck which was used for validating parent resource ownership.
# This will prevent us from having to handling backward compatibility
# for policy.yaml
# TODO(salv-orlando): Reinstate GenericCheck for simple tenant_id checks
@policy.register('tenant_id')
class OwnerCheck(policy.Check):
"""Resource ownership check.
This check verifies the owner of the current resource, or of another
resource referenced by the one under analysis.
In the former case it falls back to a regular GenericCheck, whereas
in the latter case it leverages the plugin to load the referenced
resource and perform the check.
"""
def __init__(self, kind, match):
self._orig_kind = kind
self._orig_match = match
# Process the match
try:
self.target_field = re.findall(r'^\%\((.*)\)s$',
match)[0]
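            # e.g. match "%(network_id)s" -> target_field "network_id"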
except IndexError:
err_reason = (_("Unable to identify a target field from:%s. "
"Match should be in the form %%(<field_name>)s") %
match)
LOG.exception(err_reason)
raise exceptions.PolicyInitError(
policy="%s:%s" % (kind, match),
reason=err_reason)
self._cache = cache._get_memory_cache_region(expiration_time=5)
super(OwnerCheck, self).__init__(kind, match)
# NOTE(slaweq): It seems we need to have it like that, otherwise we hit
# TypeError: cannot pickle '_thread.RLock' object
# during initialization of the policy rules when Neutron is run with
# mod_uwsgi, see bug https://bugs.launchpad.net/neutron/+bug/1915494 for
# details
def __deepcopy__(self, memo):
return OwnerCheck(self._orig_kind, self._orig_match)
@cache.cache_method_results
def _extract(self, resource_type, resource_id, field):
# NOTE(salv-orlando): This check currently assumes the parent
# resource is handled by the core plugin. It might be worth
# having a way to map resources to plugins so to make this
# check more general
plugin = directory.get_plugin()
if resource_type in service_const.EXT_PARENT_RESOURCE_MAPPING:
plugin = directory.get_plugin(
service_const.EXT_PARENT_RESOURCE_MAPPING[resource_type])
f = getattr(plugin, 'get_%s' % resource_type)
# f *must* exist, if not found it is better to let neutron
# explode. Check will be performed with admin context
try:
data = f(context.get_admin_context(),
resource_id,
fields=[field])
except exceptions.NotFound as e:
# NOTE(kevinbenton): a NotFound exception can occur if a
# list operation is happening at the same time as one of
# the parents and its children being deleted. So we issue
# a RetryRequest so the API will redo the lookup and the
# problem items will be gone.
raise db_exc.RetryRequest(e)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception('Policy check error while calling %s!', f)
return data[field]
def __call__(self, target, creds, enforcer):
if self.target_field not in target:
# policy needs a plugin check
# target field is in the form resource:field
# however if they're not separated by a colon, use an underscore
# as a separator for backward compatibility
def do_split(separator):
parent_res, parent_field = self.target_field.split(
separator, 1)
return parent_res, parent_field
for separator in (':', '_'):
try:
parent_res, parent_field = do_split(separator)
break
except ValueError:
LOG.debug("Unable to find ':' as separator in %s.",
self.target_field)
else:
# If we are here split failed with both separators
err_reason = (_("Unable to find resource name in %s") %
self.target_field)
LOG.error(err_reason)
raise exceptions.PolicyCheckError(
policy="%s:%s" % (self.kind, self.match),
reason=err_reason)
parent_foreign_key = _RESOURCE_FOREIGN_KEYS.get(
"%ss" % parent_res, None)
if parent_res == constants.EXT_PARENT_PREFIX:
for resource in service_const.EXT_PARENT_RESOURCE_MAPPING:
key = "%s_%s_id" % (constants.EXT_PARENT_PREFIX, resource)
if key in target:
parent_foreign_key = key
parent_res = resource
break
if not parent_foreign_key:
err_reason = (_("Unable to verify match:%(match)s as the "
"parent resource: %(res)s was not found") %
{'match': self.match, 'res': parent_res})
LOG.error(err_reason)
raise exceptions.PolicyCheckError(
policy="%s:%s" % (self.kind, self.match),
reason=err_reason)
target[self.target_field] = self._extract(
parent_res, target[parent_foreign_key], parent_field)
match = self.match % target
if self.kind in creds:
return match == str(creds[self.kind])
return False
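# Illustrative only (not part of the original module): an OwnerCheck is built
# from a policy rule such as "tenant_id:%(network:tenant_id)s". The kind is
# "tenant_id" and the match "%(network:tenant_id)s" names the parent resource
# ("network") and field ("tenant_id") that __call__ resolves via _extract()
# when the target dict does not already carry the field.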
@policy.register('field')
class FieldCheck(policy.Check):
def __init__(self, kind, match):
self._orig_kind = kind
self._orig_match = match
# Process the match
resource, field_value = match.split(':', 1)
field, value = field_value.split('=', 1)
super(FieldCheck, self).__init__(kind, '%s:%s:%s' %
(resource, field, value))
# Value might need conversion - we need help from the attribute map
try:
attr = attributes.RESOURCES[resource][field]
conv_func = attr['convert_to']
except KeyError:
conv_func = lambda x: x
self.field = field
self.resource = resource
self.value = conv_func(value)
self.regex = re.compile(value[1:]) if value.startswith('~') else None
# TODO(stephenfin): Remove this when we drop support for Python 3.6, since
# that supports copying regex objects natively
def __deepcopy__(self, memo):
return FieldCheck(self._orig_kind, self._orig_match)
def __call__(self, target_dict, cred_dict, enforcer):
target_value = self._get_target_value(target_dict)
# target_value might be a boolean, explicitly compare with None
if target_value is None:
return False
if self.regex:
return bool(self.regex.match(target_value))
return target_value == self.value
def _get_target_value(self, target_dict):
if self.field in target_dict:
return target_dict[self.field]
# NOTE(slaweq): In case that target field is "networks:shared" we need
# to treat it in "special" way as it may be used for resources other
# than network, e.g. for port or subnet
target_value = None
if self.resource == "networks" and self.field == constants.SHARED:
target_network_id = target_dict.get("network_id")
if not target_network_id:
LOG.debug("Unable to find network_id field in target: "
"%(target_dict)s",
                          {'target_dict': target_dict})
return
project_id = target_dict.get('project_id')
ctx = (context.Context(tenant_id=project_id) if project_id
else context.get_admin_context())
plugin = directory.get_plugin()
network = plugin.get_network(ctx, target_network_id)
target_value = network.get(self.field)
if target_value is None:
LOG.debug("Unable to find requested field: %(field)s in target: "
"%(target_dict)s",
{'field': self.field, 'target_dict': target_dict})
return target_value
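# Illustrative only: a FieldCheck is built from a rule such as
# "field:networks:shared=True". The resource is "networks", the field is
# "shared", and the value "True" is run through the attribute map's
# convert_to function; a value starting with "~" is compiled as a regex.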
def _prepare_check(context, action, target, pluralized):
"""Prepare rule, target, and context for the policy engine."""
# Compare with None to distinguish case in which target is {}
if target is None:
target = {}
match_rule = _build_match_rule(action, target, pluralized)
return match_rule, target, context
def log_rule_list(match_rule):
if LOG.isEnabledFor(logging.DEBUG):
rules = _process_rules_list([], match_rule)
LOG.debug("Enforcing rules: %s", rules)
def check(context, action, target, plugin=None, might_not_exist=False,
pluralized=None):
"""Verifies that the action is valid on the target in this context.
:param context: neutron context
:param action: string representing the action to be checked
this should be colon separated for clarity.
:param target: dictionary representing the object of the action
for object creation this should be a dictionary representing the
location of the object e.g. ``{'project_id': context.project_id}``
:param plugin: currently unused and deprecated.
Kept for backward compatibility.
:param might_not_exist: If True the policy check is skipped (and the
function returns True) if the specified policy does not exist.
Defaults to false.
:param pluralized: pluralized case of resource
e.g. firewall_policy -> pluralized = "firewall_policies"
:return: Returns True if access is permitted else False.
"""
# If we already know the context has admin rights do not perform an
# additional check and authorize the operation
# TODO(slaweq): Remove that is_admin check and always perform rules checks
# when old, deprecated rules will be removed and only rules with new
# personas will be supported
if not cfg.CONF.oslo_policy.enforce_new_defaults and context.is_admin:
return True
if might_not_exist and not (_ENFORCER.rules and action in _ENFORCER.rules):
return True
match_rule, target, credentials = _prepare_check(context,
action,
target,
pluralized)
return _ENFORCER.enforce(match_rule,
target,
credentials,
pluralized=pluralized)
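# Illustrative usage (assumed caller pattern, not part of this module):
#
#     if not policy.check(context, 'get_network',
#                         {'project_id': context.project_id}):
#         raise SomeNotAuthorizedError()  # hypothetical error type
#
# check() returns a boolean, while enforce() below raises
# oslo_policy.policy.PolicyNotAuthorized (or InvalidScope) on failure.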
def enforce(context, action, target, plugin=None, pluralized=None):
"""Verifies that the action is valid on the target in this context.
:param context: neutron context
:param action: string representing the action to be checked
this should be colon separated for clarity.
:param target: dictionary representing the object of the action
for object creation this should be a dictionary representing the
location of the object e.g. ``{'project_id': context.project_id}``
:param plugin: currently unused and deprecated.
Kept for backward compatibility.
:param pluralized: pluralized case of resource
e.g. firewall_policy -> pluralized = "firewall_policies"
:raises oslo_policy.policy.PolicyNotAuthorized:
if verification fails.
"""
# If we already know the context has admin rights do not perform an
# additional check and authorize the operation
# TODO(slaweq): Remove that is_admin check and always perform rules checks
# when old, deprecated rules will be removed and only rules with new
# personas will be supported
if not cfg.CONF.oslo_policy.enforce_new_defaults and context.is_admin:
return True
rule, target, context = _prepare_check(context, action, target, pluralized)
try:
result = _ENFORCER.enforce(rule, target, context, action=action,
do_raise=True)
except (policy.PolicyNotAuthorized, policy.InvalidScope):
with excutils.save_and_reraise_exception():
log_rule_list(rule)
LOG.debug("Failed policy enforce for '%s'", action)
return result
def get_enforcer():
# NOTE(amotoki): This was borrowed from nova/policy.py.
# This method is for use by oslo.policy CLI scripts. Those scripts need the
# 'output-file' and 'namespace' options, but having those in sys.argv means
# loading the neutron config options will fail as those are not expected to
# be present. So we pass in an arg list with those stripped out.
conf_args = []
# Start at 1 because cfg.CONF expects the equivalent of sys.argv[1:]
i = 1
while i < len(sys.argv):
if sys.argv[i].strip('-') in ['namespace', 'output-file']:
i += 2
continue
conf_args.append(sys.argv[i])
i += 1
cfg.CONF(conf_args, project='neutron')
init()
return _ENFORCER
| {
"content_hash": "cc2476fdce7b295a261f4a9eaefe21f1",
"timestamp": "",
"source": "github",
"line_count": 530,
"max_line_length": 112,
"avg_line_length": 42.8188679245283,
"alnum_prop": 0.6135101789019124,
"repo_name": "openstack/neutron",
"id": "dca079b3c980e378c1a5b92fc95fd93a458c8f60",
"size": "23335",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/policy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "2773"
},
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "15932611"
},
{
"name": "Ruby",
"bytes": "1257"
},
{
"name": "Shell",
"bytes": "83270"
}
],
"symlink_target": ""
} |
package org.cybergarage.upnp.std.av.server.object.format;
import java.io.*;
import org.cybergarage.upnp.std.av.server.object.*;
public class PNGFormat extends ImageIOFormat
{
////////////////////////////////////////////////
	// Constructor
////////////////////////////////////////////////
public PNGFormat()
{
}
public PNGFormat(File file)
{
super(file);
}
////////////////////////////////////////////////
// Abstract Methods
////////////////////////////////////////////////
public boolean equals(File file)
{
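		// The PNG signature begins with the bytes 0x89 'P' 'N' 'G', so for a
		// valid PNG file the three bytes at offset 1 spell "PNG".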
String headerID = Header.getIDString(file, 1, 3);
		return headerID.startsWith("PNG");
}
public FormatObject createObject(File file)
{
return new PNGFormat(file);
}
public String getMimeType()
{
return "image/png";
}
}
| {
"content_hash": "bf36110306e03abdde950cfe76f4f0b3",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 57,
"avg_line_length": 18.319148936170212,
"alnum_prop": 0.4912891986062718,
"repo_name": "beamly/CyberLink4Java",
"id": "eea4280dd1e12a064b6bc08c0684cc9130065374",
"size": "1157",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "std/src/main/java/org/cybergarage/upnp/std/av/server/object/format/PNGFormat.java",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "889638"
},
{
"name": "Perl",
"bytes": "12295"
}
],
"symlink_target": ""
} |
require 'spec_helper'
describe Checker::Modules::Yaml do
it 'should only check .yaml and .yml files' do
files = ['a.rb', 'b.js.erb', 'c.r', 'd.yml', 'e.yaml']
mod = Checker::Modules::Yaml.new(files)
mod.stub(:check_one_file).and_return(stub(:success? => true, :status => :ok))
mod.should_receive(:check_one_file).with('d.yml')
mod.should_receive(:check_one_file).with('e.yaml')
mod.should_not_receive(:check_one_file).with('a.rb')
mod.should_not_receive(:check_one_file).with('b.js.erb')
mod.should_not_receive(:check_one_file).with('c.r')
mod.check
end
it "should properly fetch yaml files" do
files = [fixture("yaml", "good.yaml")]
mod = Checker::Modules::Yaml.new(files)
mod.check.should be_true
end
it "should not pass the syntax check" do
files = [fixture("yaml", "bad.yaml")]
mod = Checker::Modules::Yaml.new(files)
mod.check.should be_false
end
end
| {
"content_hash": "08b569b576e086a0e87044070a549c13",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 81,
"avg_line_length": 34.51851851851852,
"alnum_prop": 0.6459227467811158,
"repo_name": "netguru/checker",
"id": "f6f055616f8a21e4b8a6ccaaefcdc52c9bae9e47",
"size": "932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/checker/modules/yaml_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "57"
},
{
"name": "Ruby",
"bytes": "42373"
}
],
"symlink_target": ""
} |
MathJax.Localization.addTranslation("cdo","HelpDialog",{version:"2.7.4",isLoaded:true,strings:{}});MathJax.Ajax.loadComplete("[MathJax]/localization/cdo/HelpDialog.js");
| {
"content_hash": "45636ed06c9cd346e21f44ace1a7f9b7",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 169,
"avg_line_length": 57.333333333333336,
"alnum_prop": 0.7674418604651163,
"repo_name": "sashberd/cdnjs",
"id": "bde299cbf4a68ddd199dd0f8f89786342ebc1023",
"size": "843",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "ajax/libs/mathjax/2.7.4/localization/cdo/HelpDialog.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package org.knowm.xchange.bitmex.dto.marketdata;
import java.math.BigDecimal;
import java.util.Date;
import org.knowm.xchange.bitmex.dto.trade.BitmexSide;
import org.knowm.xchange.bitmex.dto.trade.BitmexTickDirection;
import com.fasterxml.jackson.annotation.JsonProperty;
public class BitmexPublicTrade {
private final BigDecimal price;
private final BigDecimal size;
private final Date timestamp;
private final BitmexSide side;
private final BitmexTickDirection tickDirection;
private final String symbol;
private final String trdMatchID;
private final BigDecimal grossValue;
private final BigDecimal homeNotional;
private final BigDecimal foreignNotional;
public BitmexPublicTrade(@JsonProperty("price") BigDecimal price, @JsonProperty("size") BigDecimal size, @JsonProperty("timestamp") Date timestamp, @JsonProperty("side") BitmexSide side,
@JsonProperty("tickDirection") BitmexTickDirection tickDirection, @JsonProperty("symbol") String symbol, @JsonProperty("trdMatchID") String trdMatchID,
@JsonProperty("grossValue") BigDecimal grossValue, @JsonProperty("homeNotional") BigDecimal homeNotional, @JsonProperty("foreignNotional") BigDecimal foreignNotional) {
this.price = price;
this.size = size;
this.timestamp = timestamp;
this.side = side;
this.tickDirection = tickDirection;
this.symbol = symbol;
this.trdMatchID = trdMatchID;
this.homeNotional = homeNotional;
this.foreignNotional = foreignNotional;
this.grossValue = grossValue;
}
public BigDecimal getPrice() {
return price;
}
public BigDecimal getSize() {
return size;
}
public Date getTime() {
return timestamp;
}
public BitmexSide getSide() {
return side;
}
public BitmexTickDirection getTickDirection() {
return tickDirection;
}
public String getSymbol() {
return symbol;
}
public String getTrdMatchID() {
return trdMatchID;
}
@Override
public String toString() {
return "BitmexPublicTrade [symbol=" + symbol + " price=" + price + ", size=" + size + ", time=" + timestamp + ", side=" + side + ", tickDirection=" + tickDirection + "]";
}
}
| {
"content_hash": "3f9bb081c223466814bc8de1d50e6d4d",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 188,
"avg_line_length": 26.51219512195122,
"alnum_prop": 0.7290708371665133,
"repo_name": "evdubs/XChange",
"id": "d62e923fefab0aa9531fad1ee157311013c80453",
"size": "2174",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "xchange-bitmex/src/main/java/org/knowm/xchange/bitmex/dto/marketdata/BitmexPublicTrade.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "5220592"
}
],
"symlink_target": ""
} |
package thrift
import (
"compress/gzip"
"io"
"net/http"
"strings"
)
// NewThriftHandlerFunc returns a ready-to-use Apache Thrift HTTP handler function.
func NewThriftHandlerFunc(processor TProcessor,
inPfactory, outPfactory TProtocolFactory) func(w http.ResponseWriter, r *http.Request) {
return gz(func(w http.ResponseWriter, r *http.Request) {
w.Header().Add("Content-Type", "application/x-thrift")
transport := NewStreamTransport(r.Body, w)
processor.Process(r.Context(), inPfactory.GetProtocol(transport), outPfactory.GetProtocol(transport))
})
}
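// Illustrative usage sketch (assumed names; "calculator" stands for any
// thrift-generated package and handler):
//
//	processor := calculator.NewCalculatorProcessor(handler)
//	pf := NewTBinaryProtocolFactoryDefault()
//	http.HandleFunc("/thrift", NewThriftHandlerFunc(processor, pf, pf))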
// gz transparently compresses the HTTP response if the client supports it.
func gz(handler http.HandlerFunc) http.HandlerFunc {
sp := newPool(func() *gzip.Writer {
return gzip.NewWriter(nil)
}, nil)
return func(w http.ResponseWriter, r *http.Request) {
if !strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
handler(w, r)
return
}
w.Header().Set("Content-Encoding", "gzip")
gz := sp.get()
gz.Reset(w)
defer func() {
gz.Close()
sp.put(&gz)
}()
gzw := gzipResponseWriter{Writer: gz, ResponseWriter: w}
handler(gzw, r)
}
}
type gzipResponseWriter struct {
io.Writer
http.ResponseWriter
}
func (w gzipResponseWriter) Write(b []byte) (int, error) {
return w.Writer.Write(b)
}
| {
"content_hash": "2c31f61e993980cd80bdfed0ceb0c9c4",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 103,
"avg_line_length": 24.185185185185187,
"alnum_prop": 0.7120980091883614,
"repo_name": "nsuke/thrift",
"id": "c84aba953cba0d5cf337e11de08f0b37e5ea63b3",
"size": "2109",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lib/go/thrift/http_transport.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "34900"
},
{
"name": "C",
"bytes": "1068458"
},
{
"name": "C#",
"bytes": "531415"
},
{
"name": "C++",
"bytes": "4752353"
},
{
"name": "CMake",
"bytes": "128611"
},
{
"name": "CSS",
"bytes": "1070"
},
{
"name": "D",
"bytes": "662065"
},
{
"name": "Dart",
"bytes": "181474"
},
{
"name": "Dockerfile",
"bytes": "66393"
},
{
"name": "Emacs Lisp",
"bytes": "5361"
},
{
"name": "Erlang",
"bytes": "323055"
},
{
"name": "Go",
"bytes": "707964"
},
{
"name": "HTML",
"bytes": "36484"
},
{
"name": "Haxe",
"bytes": "319989"
},
{
"name": "Java",
"bytes": "1379918"
},
{
"name": "JavaScript",
"bytes": "456805"
},
{
"name": "Kotlin",
"bytes": "60847"
},
{
"name": "Lex",
"bytes": "10761"
},
{
"name": "Lua",
"bytes": "81630"
},
{
"name": "M4",
"bytes": "172618"
},
{
"name": "Makefile",
"bytes": "216507"
},
{
"name": "OCaml",
"bytes": "39269"
},
{
"name": "PHP",
"bytes": "353558"
},
{
"name": "Pascal",
"bytes": "594372"
},
{
"name": "Perl",
"bytes": "133070"
},
{
"name": "Python",
"bytes": "509091"
},
{
"name": "Ruby",
"bytes": "400013"
},
{
"name": "Rust",
"bytes": "362681"
},
{
"name": "Shell",
"bytes": "61391"
},
{
"name": "Smalltalk",
"bytes": "22944"
},
{
"name": "Swift",
"bytes": "165395"
},
{
"name": "Thrift",
"bytes": "425010"
},
{
"name": "TypeScript",
"bytes": "61760"
},
{
"name": "Vim script",
"bytes": "2846"
},
{
"name": "Yacc",
"bytes": "26413"
}
],
"symlink_target": ""
} |
WordPress Doge Mode plugin.
Feel free to submit new words.
## For development:
1. npm install
2. gulp watch
## Description
Replaces all images with doge, adds floating doge phrases, and applies the doge font!
## Installation
1. Upload `wp-doge-mode` to the `/wp-content/plugins/` directory
1. Activate the plugin through the 'Plugins' menu in WordPress
## Changelog
= 1.0 =
* First release
## Upgrade Notice
* 1.0 First Release
| {
"content_hash": "296601dd0f665a58626b17ef954ff92f",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 64,
"avg_line_length": 17.291666666666668,
"alnum_prop": 0.7301204819277108,
"repo_name": "lukasjuhas/wp-doge-mode",
"id": "1716e2782d4ed24400f339c6af2998be17f6c643",
"size": "427",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "2579"
},
{
"name": "PHP",
"bytes": "4030"
}
],
"symlink_target": ""
} |
// NOTE: the original file header (license block, include-guard opening and
// includes) appears to have been stripped during extraction; the guard below
// is reconstructed with an assumed macro name to match the trailing #endif.
#ifndef BDG_PALO_BE_RUNTIME_DATA_STREAM_RECVR_HPP
#define BDG_PALO_BE_RUNTIME_DATA_STREAM_RECVR_HPP

namespace palo {
class DataStreamMgr;
// Single receiver of an m:n data stream.
// Incoming row batches are routed to destinations based on the provided
// partitioning specification.
// Receivers are created via DataStreamMgr::CreateRecvr().
class DataStreamRecvr {
public:
// deregister from _mgr
~DataStreamRecvr() {
// TODO: log error msg
_mgr->deregister_recvr(_cb->fragment_instance_id(), _cb->dest_node_id());
}
// Returns next row batch in data stream; blocks if there aren't any.
// Returns NULL if eos (subsequent calls will not return any more batches).
// Sets 'is_cancelled' to true if receiver fragment got cancelled, otherwise false.
// The caller owns the batch.
// TODO: error handling
RowBatch* get_batch(bool* is_cancelled) {
return _cb->get_batch(is_cancelled);
}
RuntimeProfile* profile() {
return _cb->profile();
}
private:
friend class DataStreamMgr;
DataStreamMgr* _mgr;
boost::shared_ptr<DataStreamMgr::StreamControlBlock> _cb;
DataStreamRecvr(DataStreamMgr* mgr,
boost::shared_ptr<DataStreamMgr::StreamControlBlock> cb)
: _mgr(mgr), _cb(cb) {}
};
}
#endif
| {
"content_hash": "ba76e62f173b09c6fc9283f87c051866",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 87,
"avg_line_length": 28.904761904761905,
"alnum_prop": 0.6655683690280065,
"repo_name": "cyongli/palo",
"id": "f58784ce60f36643d29260b0a7f4cc1a32bf73ab",
"size": "2280",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "be/src/runtime/data_stream_recvr.hpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "437331"
},
{
"name": "C++",
"bytes": "10120876"
},
{
"name": "CMake",
"bytes": "66285"
},
{
"name": "CSS",
"bytes": "3843"
},
{
"name": "Java",
"bytes": "6250887"
},
{
"name": "JavaScript",
"bytes": "5625"
},
{
"name": "Lex",
"bytes": "29068"
},
{
"name": "Makefile",
"bytes": "9065"
},
{
"name": "Python",
"bytes": "124239"
},
{
"name": "Shell",
"bytes": "26242"
},
{
"name": "Thrift",
"bytes": "170737"
},
{
"name": "Yacc",
"bytes": "97567"
}
],
"symlink_target": ""
} |
package com.intel.analytics.bigdl.keras.nn
import com.intel.analytics.bigdl.keras.KerasBaseSpec
import com.intel.analytics.bigdl.nn.keras.{Sequential => KSequential}
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.nn.keras.LeakyReLU
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
class LeakyReLUSpec extends KerasBaseSpec {
"LeakyReLU" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[3])
|input = np.random.uniform(0, 1, [1, 3])
|output_tensor = LeakyReLU(0.01)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = KSequential[Float]()
val leakyrelu = LeakyReLU[Float](0.01, inputShape = Shape(3))
seq.add(leakyrelu)
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode)
}
"LeakyReLU 3D" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[3, 24])
|input = np.random.random([2, 3, 24])
|output_tensor = LeakyReLU(1.27)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = KSequential[Float]()
val leakyrelu = LeakyReLU[Float](1.27, inputShape = Shape(3, 24))
seq.add(leakyrelu)
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode)
}
}
class LeakyReLUSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val layer = LeakyReLU[Float](1.27, inputShape = Shape(8, 24))
layer.build(Shape(2, 8, 24))
val input = Tensor[Float](2, 8, 24).apply1(_ => Random.nextFloat())
runSerializationTest(layer, input)
}
}
| {
"content_hash": "a6abc8c7e8677a433a9991d253f17ad4",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 93,
"avg_line_length": 34.660714285714285,
"alnum_prop": 0.6929417825862957,
"repo_name": "yiheng/BigDL",
"id": "d3bb168107a0904a056d6ed8b630ddeb38aaa270",
"size": "2542",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "spark/dl/src/test/scala/com/intel/analytics/bigdl/keras/nn/LeakyReLUSpec.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "5177"
},
{
"name": "Java",
"bytes": "6829"
},
{
"name": "Lua",
"bytes": "1904"
},
{
"name": "Python",
"bytes": "1010500"
},
{
"name": "RobotFramework",
"bytes": "30098"
},
{
"name": "Scala",
"bytes": "8194601"
},
{
"name": "Shell",
"bytes": "55677"
}
],
"symlink_target": ""
} |
namespace RefactorExamBitsToBitsTask
{
using System;
using System.Text;
public class BitsToBits
{
public static void Main()
{
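            // Approach: read N numbers, concatenate their 30-bit binary
            // representations, then scan the combined string for the longest
            // runs of '0's and '1's.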
int numbersCount = int.Parse(Console.ReadLine());
long maxCountZeros = 0;
long maxCountOnes = 0;
long currentZeros = 0;
long currentOnes = 0;
StringBuilder binary = new StringBuilder();
for (int i = 0; i < numbersCount; i++)
{
long number = long.Parse(Console.ReadLine());
binary.Append(Convert.ToString(number, 2).PadLeft(30, '0'));
}
char currentSymbol = binary[0];
for (int index = 0; index < binary.Length; index++)
{
if (currentSymbol == '0')
{
while (currentSymbol == binary[index])
{
currentZeros++;
if (index + 1 < binary.Length)
{
index++;
}
else
{
break;
}
}
if (maxCountZeros < currentZeros)
{
maxCountZeros = currentZeros;
}
currentZeros = 0;
currentSymbol = binary[index];
if (index != binary.Length - 1)
{
index--;
}
}
else if (currentSymbol == '1')
{
while (currentSymbol == binary[index])
{
currentOnes++;
if (index + 1 < binary.Length)
{
index++;
}
else
{
break;
}
}
if (maxCountOnes < currentOnes)
{
maxCountOnes = currentOnes;
}
currentSymbol = binary[index];
currentOnes = 0;
if (index != binary.Length - 1)
{
index--;
}
}
}
Console.WriteLine(maxCountZeros);
Console.WriteLine(maxCountOnes);
}
}
}
| {
"content_hash": "6fb4147d740bbe4ee35dfb6bb6a70520",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 76,
"avg_line_length": 28.505494505494507,
"alnum_prop": 0.3338473400154202,
"repo_name": "zhenyaracheva/TelerikAcademy",
"id": "bcf8b16559ce58ddf98d274dea52addd36a8f5a0",
"size": "2596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "High Quality Code/Homeworks/ControlFlowConditionalStatementsLoops/RefactorExamBitsToBitsTask/BitsToBits.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "224"
},
{
"name": "C#",
"bytes": "1690430"
},
{
"name": "CSS",
"bytes": "33473"
},
{
"name": "HTML",
"bytes": "188174"
},
{
"name": "JavaScript",
"bytes": "375622"
},
{
"name": "SQLPL",
"bytes": "3830"
},
{
"name": "XSLT",
"bytes": "4674"
}
],
"symlink_target": ""
} |
package proto
import (
"bytes"
"compress/gzip"
"encoding/binary"
"errors"
"fmt"
"hash/crc32"
"io"
"io/ioutil"
"time"
"github.com/golang/snappy"
)
/*
Kafka wire protocol implemented as described in
https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-Messagesets
*/
const (
ProduceReqKind = 0
FetchReqKind = 1
OffsetReqKind = 2
MetadataReqKind = 3
OffsetCommitReqKind = 8
OffsetFetchReqKind = 9
GroupCoordinatorReqKind = 10
// receive the latest offset (i.e. the offset of the next coming message)
OffsetReqTimeLatest = -1
// receive the earliest available offset. Note that because offsets are
// pulled in descending order, asking for the earliest offset will always
// return you a single element.
OffsetReqTimeEarliest = -2
// Server will not send any response.
RequiredAcksNone = 0
// Server will block until the message is committed by all in sync replicas
// before sending a response.
RequiredAcksAll = -1
// Server will wait the data is written to the local log before sending a
// response.
RequiredAcksLocal = 1
)
type Compression int8
const (
CompressionNone Compression = 0
CompressionGzip Compression = 1
CompressionSnappy Compression = 2
)
type Request interface {
WriteTo(io.Writer) (int64, error)
}
// ReadReq returns request kind ID and byte representation of the whole message
// in wire protocol format.
func ReadReq(r io.Reader) (requestKind int16, b []byte, err error) {
dec := NewDecoder(r)
msgSize := dec.DecodeInt32()
requestKind = dec.DecodeInt16()
if err := dec.Err(); err != nil {
return 0, nil, err
}
// size of the message + size of the message itself
b = make([]byte, msgSize+4)
binary.BigEndian.PutUint32(b, uint32(msgSize))
binary.BigEndian.PutUint16(b[4:], uint16(requestKind))
if _, err := io.ReadFull(r, b[6:]); err != nil {
return 0, nil, err
}
return requestKind, b, err
}
// ReadResp returns the message correlation ID and the byte representation of
// the whole message in wire protocol format that is read from the given
// stream, including the 4 bytes of the message size itself.
// The byte representation returned by ReadResp can be parsed by all response
// readers to transform it into a specialized response structure.
func ReadResp(r io.Reader) (correlationID int32, b []byte, err error) {
dec := NewDecoder(r)
msgSize := dec.DecodeInt32()
correlationID = dec.DecodeInt32()
if err := dec.Err(); err != nil {
return 0, nil, err
}
// size of the message + size of the message itself
b = make([]byte, msgSize+4)
binary.BigEndian.PutUint32(b, uint32(msgSize))
binary.BigEndian.PutUint32(b[4:], uint32(correlationID))
_, err = io.ReadFull(r, b[8:])
return correlationID, b, err
}
// Message represents single entity of message set.
type Message struct {
Key []byte
Value []byte
Offset int64 // set when fetching and after successful producing
Crc uint32 // set when fetching, ignored when producing
Topic string // set when fetching, ignored when producing
Partition int32 // set when fetching, ignored when producing
TipOffset int64 // set when fetching, ignored when processing
}
// ComputeCrc returns crc32 hash for given message content.
func ComputeCrc(m *Message, compression Compression) uint32 {
var buf bytes.Buffer
enc := NewEncoder(&buf)
enc.EncodeInt8(0) // magic byte is always 0
enc.EncodeInt8(int8(compression))
enc.EncodeBytes(m.Key)
enc.EncodeBytes(m.Value)
return crc32.ChecksumIEEE(buf.Bytes())
}
// writeMessageSet writes a Message Set into w.
// It returns the number of bytes written and any error.
func writeMessageSet(w io.Writer, messages []*Message, compression Compression) (int, error) {
if len(messages) == 0 {
return 0, nil
}
// NOTE(caleb): it doesn't appear to be documented, but I observed that the
// Java client sets the offset of the synthesized message set for a group of
// compressed messages to be the offset of the last message in the set.
compressOffset := messages[len(messages)-1].Offset
switch compression {
case CompressionGzip:
var buf bytes.Buffer
gz := gzip.NewWriter(&buf)
if _, err := writeMessageSet(gz, messages, CompressionNone); err != nil {
return 0, err
}
if err := gz.Close(); err != nil {
return 0, err
}
messages = []*Message{
{
Value: buf.Bytes(),
Offset: compressOffset,
},
}
case CompressionSnappy:
var buf bytes.Buffer
if _, err := writeMessageSet(&buf, messages, CompressionNone); err != nil {
return 0, err
}
messages = []*Message{
{
Value: snappy.Encode(nil, buf.Bytes()),
Offset: compressOffset,
},
}
}
totalSize := 0
b := newSliceWriter(0)
for _, message := range messages {
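		// Per-message framing: offset(8) + size(4) + crc(4) + magic(1) +
		// attributes(1) + key length(4) + value length(4) = 26 bytes, hence
		// the 26 below and the 14-byte msize that excludes offset and size.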
bsize := 26 + len(message.Key) + len(message.Value)
b.Reset(bsize)
enc := NewEncoder(b)
enc.EncodeInt64(message.Offset)
msize := int32(14 + len(message.Key) + len(message.Value))
enc.EncodeInt32(msize)
enc.EncodeUint32(0) // crc32 placeholder
enc.EncodeInt8(0) // magic byte
enc.EncodeInt8(int8(compression))
enc.EncodeBytes(message.Key)
enc.EncodeBytes(message.Value)
if err := enc.Err(); err != nil {
return totalSize, err
}
const hsize = 8 + 4 + 4 // offset + message size + crc32
const crcoff = 8 + 4 // offset + message size
binary.BigEndian.PutUint32(b.buf[crcoff:crcoff+4], crc32.ChecksumIEEE(b.buf[hsize:bsize]))
if n, err := w.Write(b.Slice()); err != nil {
return totalSize, err
} else {
totalSize += n
}
}
return totalSize, nil
}
type slicewriter struct {
buf []byte
pos int
size int
}
func newSliceWriter(bufsize int) *slicewriter {
return &slicewriter{
buf: make([]byte, bufsize),
pos: 0,
}
}
func (w *slicewriter) Write(p []byte) (int, error) {
if len(w.buf) < w.pos+len(p) {
return 0, errors.New("buffer too small")
}
copy(w.buf[w.pos:], p)
w.pos += len(p)
return len(p), nil
}
func (w *slicewriter) Reset(size int) {
if size > len(w.buf) {
w.buf = make([]byte, size+1000) // allocate a bit more than required
}
w.size = size
w.pos = 0
}
func (w *slicewriter) Slice() []byte {
return w.buf[:w.pos]
}
// readMessageSet reads and returns messages from the stream.
// The size is known before a message set is decoded.
// Because Kafka sends message sets directly from the drive, it might cut off
// part of the last message. This also means that the last message can be
// shorter than the header says. In such a case, just ignore the last
// malformed message from the set and return the earlier data.
func readMessageSet(r io.Reader, size int32) ([]*Message, error) {
rd := io.LimitReader(r, int64(size))
dec := NewDecoder(rd)
set := make([]*Message, 0, 256)
var buf []byte
for {
offset := dec.DecodeInt64()
if err := dec.Err(); err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
return set, nil
}
return nil, err
}
// single message size
size := dec.DecodeInt32()
if err := dec.Err(); err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
return set, nil
}
return nil, err
}
// read message to buffer to compute its content crc
if int(size) > len(buf) {
// allocate a bit more than needed
buf = make([]byte, size+10240)
}
msgbuf := buf[:size]
if _, err := io.ReadFull(rd, msgbuf); err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
return set, nil
}
return nil, err
}
msgdec := NewDecoder(bytes.NewBuffer(msgbuf))
msg := &Message{
Offset: offset,
Crc: msgdec.DecodeUint32(),
}
if msg.Crc != crc32.ChecksumIEEE(msgbuf[4:]) {
// ignore this message and because we want to have constant
// history, do not process anything more
return set, nil
}
// magic byte
_ = msgdec.DecodeInt8()
attributes := msgdec.DecodeInt8()
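		// The low two bits of the attributes byte select the compression codec.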
switch compression := Compression(attributes & 3); compression {
case CompressionNone:
msg.Key = msgdec.DecodeBytes()
msg.Value = msgdec.DecodeBytes()
if err := msgdec.Err(); err != nil {
return nil, fmt.Errorf("cannot decode message: %s", err)
}
set = append(set, msg)
case CompressionGzip, CompressionSnappy:
_ = msgdec.DecodeBytes() // ignore key
val := msgdec.DecodeBytes()
if err := msgdec.Err(); err != nil {
return nil, fmt.Errorf("cannot decode message: %s", err)
}
var decoded []byte
switch compression {
case CompressionGzip:
cr, err := gzip.NewReader(bytes.NewReader(val))
if err != nil {
return nil, fmt.Errorf("error decoding gzip message: %s", err)
}
decoded, err = ioutil.ReadAll(cr)
if err != nil {
return nil, fmt.Errorf("error decoding gzip message: %s", err)
}
_ = cr.Close()
case CompressionSnappy:
var err error
decoded, err = snappyDecode(val)
if err != nil {
return nil, fmt.Errorf("error decoding snappy message: %s", err)
}
}
msgs, err := readMessageSet(bytes.NewReader(decoded), int32(len(decoded)))
if err != nil {
return nil, err
}
set = append(set, msgs...)
default:
return nil, fmt.Errorf("cannot handle compression method: %d", compression)
}
}
}
type MetadataReq struct {
CorrelationID int32
ClientID string
Topics []string
}
func ReadMetadataReq(r io.Reader) (*MetadataReq, error) {
var req MetadataReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key + api version
_ = dec.DecodeInt32()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
req.Topics = make([]string, dec.DecodeArrayLen())
for i := range req.Topics {
req.Topics[i] = dec.DecodeString()
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *MetadataReq) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(int16(MetadataReqKind))
enc.Encode(int16(0))
enc.Encode(r.CorrelationID)
enc.Encode(r.ClientID)
enc.EncodeArrayLen(len(r.Topics))
for _, name := range r.Topics {
enc.Encode(name)
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func (r *MetadataReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type MetadataResp struct {
CorrelationID int32
Brokers []MetadataRespBroker
Topics []MetadataRespTopic
}
type MetadataRespBroker struct {
NodeID int32
Host string
Port int32
}
type MetadataRespTopic struct {
Name string
Err error
Partitions []MetadataRespPartition
}
type MetadataRespPartition struct {
ID int32
Err error
Leader int32
Replicas []int32
Isrs []int32
}
func (r *MetadataResp) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(r.CorrelationID)
enc.EncodeArrayLen(len(r.Brokers))
for _, broker := range r.Brokers {
enc.Encode(broker.NodeID)
enc.Encode(broker.Host)
enc.Encode(broker.Port)
}
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.EncodeError(topic.Err)
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.EncodeError(part.Err)
enc.Encode(part.ID)
enc.Encode(part.Leader)
enc.Encode(part.Replicas)
enc.Encode(part.Isrs)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func ReadMetadataResp(r io.Reader) (*MetadataResp, error) {
var resp MetadataResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Brokers = make([]MetadataRespBroker, dec.DecodeArrayLen())
for i := range resp.Brokers {
var b = &resp.Brokers[i]
b.NodeID = dec.DecodeInt32()
b.Host = dec.DecodeString()
b.Port = dec.DecodeInt32()
}
resp.Topics = make([]MetadataRespTopic, dec.DecodeArrayLen())
for ti := range resp.Topics {
var t = &resp.Topics[ti]
t.Err = errFromNo(dec.DecodeInt16())
t.Name = dec.DecodeString()
t.Partitions = make([]MetadataRespPartition, dec.DecodeArrayLen())
for pi := range t.Partitions {
var p = &t.Partitions[pi]
p.Err = errFromNo(dec.DecodeInt16())
p.ID = dec.DecodeInt32()
p.Leader = dec.DecodeInt32()
p.Replicas = make([]int32, dec.DecodeArrayLen())
for ri := range p.Replicas {
p.Replicas[ri] = dec.DecodeInt32()
}
p.Isrs = make([]int32, dec.DecodeArrayLen())
for ii := range p.Isrs {
p.Isrs[ii] = dec.DecodeInt32()
}
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &resp, nil
}
type FetchReq struct {
CorrelationID int32
ClientID string
MaxWaitTime time.Duration
MinBytes int32
Topics []FetchReqTopic
}
type FetchReqTopic struct {
Name string
Partitions []FetchReqPartition
}
type FetchReqPartition struct {
ID int32
FetchOffset int64
MaxBytes int32
}
func ReadFetchReq(r io.Reader) (*FetchReq, error) {
var req FetchReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key + api version
_ = dec.DecodeInt32()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
// replica id
_ = dec.DecodeInt32()
req.MaxWaitTime = time.Duration(dec.DecodeInt32()) * time.Millisecond
req.MinBytes = dec.DecodeInt32()
req.Topics = make([]FetchReqTopic, dec.DecodeArrayLen())
for ti := range req.Topics {
var topic = &req.Topics[ti]
topic.Name = dec.DecodeString()
topic.Partitions = make([]FetchReqPartition, dec.DecodeArrayLen())
for pi := range topic.Partitions {
var part = &topic.Partitions[pi]
part.ID = dec.DecodeInt32()
part.FetchOffset = dec.DecodeInt64()
part.MaxBytes = dec.DecodeInt32()
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *FetchReq) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(int16(FetchReqKind))
enc.Encode(int16(0))
enc.Encode(r.CorrelationID)
enc.Encode(r.ClientID)
// replica id
enc.Encode(int32(-1))
enc.Encode(int32(r.MaxWaitTime / time.Millisecond))
enc.Encode(r.MinBytes)
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.Encode(part.FetchOffset)
enc.Encode(part.MaxBytes)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func (r *FetchReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type FetchResp struct {
CorrelationID int32
Topics []FetchRespTopic
}
type FetchRespTopic struct {
Name string
Partitions []FetchRespPartition
}
type FetchRespPartition struct {
ID int32
Err error
TipOffset int64
Messages []*Message
}
func (r *FetchResp) Bytes() ([]byte, error) {
var buf buffer
enc := NewEncoder(&buf)
enc.Encode(int32(0)) // placeholder
enc.Encode(r.CorrelationID)
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.EncodeError(part.Err)
enc.Encode(part.TipOffset)
i := len(buf)
enc.Encode(int32(0)) // placeholder
// NOTE(caleb): writing compressed fetch response isn't implemented
// for now, since that's not needed for clients.
n, err := writeMessageSet(&buf, part.Messages, CompressionNone)
if err != nil {
return nil, err
}
binary.BigEndian.PutUint32(buf[i:i+4], uint32(n))
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
binary.BigEndian.PutUint32(buf[:4], uint32(len(buf)-4))
return []byte(buf), nil
}
func ReadFetchResp(r io.Reader) (*FetchResp, error) {
var err error
var resp FetchResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Topics = make([]FetchRespTopic, dec.DecodeArrayLen())
for ti := range resp.Topics {
var topic = &resp.Topics[ti]
topic.Name = dec.DecodeString()
topic.Partitions = make([]FetchRespPartition, dec.DecodeArrayLen())
for pi := range topic.Partitions {
var part = &topic.Partitions[pi]
part.ID = dec.DecodeInt32()
part.Err = errFromNo(dec.DecodeInt16())
part.TipOffset = dec.DecodeInt64()
if dec.Err() != nil {
return nil, dec.Err()
}
msgSetSize := dec.DecodeInt32()
if dec.Err() != nil {
return nil, dec.Err()
}
if part.Messages, err = readMessageSet(r, msgSetSize); err != nil {
return nil, err
}
for _, msg := range part.Messages {
msg.Topic = topic.Name
msg.Partition = part.ID
msg.TipOffset = part.TipOffset
}
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &resp, nil
}
type GroupCoordinatorReq struct {
CorrelationID int32
ClientID string
ConsumerGroup string
}
func ReadGroupCoordinatorReq(r io.Reader) (*GroupCoordinatorReq, error) {
var req GroupCoordinatorReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key + api version
_ = dec.DecodeInt32()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
req.ConsumerGroup = dec.DecodeString()
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *GroupCoordinatorReq) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(int16(GroupCoordinatorReqKind))
enc.Encode(int16(0))
enc.Encode(r.CorrelationID)
enc.Encode(r.ClientID)
enc.Encode(r.ConsumerGroup)
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func (r *GroupCoordinatorReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type GroupCoordinatorResp struct {
CorrelationID int32
Err error
CoordinatorID int32
CoordinatorHost string
CoordinatorPort int32
}
func ReadGroupCoordinatorResp(r io.Reader) (*GroupCoordinatorResp, error) {
var resp GroupCoordinatorResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Err = errFromNo(dec.DecodeInt16())
resp.CoordinatorID = dec.DecodeInt32()
resp.CoordinatorHost = dec.DecodeString()
resp.CoordinatorPort = dec.DecodeInt32()
if err := dec.Err(); err != nil {
return nil, err
}
return &resp, nil
}
func (r *GroupCoordinatorResp) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(r.CorrelationID)
enc.EncodeError(r.Err)
enc.Encode(r.CoordinatorID)
enc.Encode(r.CoordinatorHost)
enc.Encode(r.CoordinatorPort)
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
type OffsetCommitReq struct {
CorrelationID int32
ClientID string
ConsumerGroup string
Topics []OffsetCommitReqTopic
}
type OffsetCommitReqTopic struct {
Name string
Partitions []OffsetCommitReqPartition
}
type OffsetCommitReqPartition struct {
ID int32
Offset int64
TimeStamp time.Time
Metadata string
}
func ReadOffsetCommitReq(r io.Reader) (*OffsetCommitReq, error) {
var req OffsetCommitReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key
_ = dec.DecodeInt16()
apiVersion := dec.DecodeInt16()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
req.ConsumerGroup = dec.DecodeString()
if apiVersion == 1 {
_ = dec.DecodeInt32()
_ = dec.DecodeString()
}
req.Topics = make([]OffsetCommitReqTopic, dec.DecodeArrayLen())
for ti := range req.Topics {
var topic = &req.Topics[ti]
topic.Name = dec.DecodeString()
topic.Partitions = make([]OffsetCommitReqPartition, dec.DecodeArrayLen())
for pi := range topic.Partitions {
var part = &topic.Partitions[pi]
part.ID = dec.DecodeInt32()
part.Offset = dec.DecodeInt64()
part.TimeStamp = time.Unix(0, dec.DecodeInt64()*int64(time.Millisecond))
part.Metadata = dec.DecodeString()
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *OffsetCommitReq) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(int16(OffsetCommitReqKind))
enc.Encode(int16(1)) // version - must be 1 to use Kafka committed offsets instead of ZK
enc.Encode(r.CorrelationID)
enc.Encode(r.ClientID)
enc.Encode(r.ConsumerGroup)
enc.Encode(int32(-1)) // ConsumerGroupGenerationId
enc.Encode("") // ConsumerId
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.Encode(part.Offset)
enc.Encode(int64(-1)) // -1 is "use current time"
enc.Encode(part.Metadata)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func (r *OffsetCommitReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type OffsetCommitResp struct {
CorrelationID int32
Topics []OffsetCommitRespTopic
}
type OffsetCommitRespTopic struct {
Name string
Partitions []OffsetCommitRespPartition
}
type OffsetCommitRespPartition struct {
ID int32
Err error
}
func ReadOffsetCommitResp(r io.Reader) (*OffsetCommitResp, error) {
var resp OffsetCommitResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Topics = make([]OffsetCommitRespTopic, dec.DecodeArrayLen())
for ti := range resp.Topics {
var t = &resp.Topics[ti]
t.Name = dec.DecodeString()
t.Partitions = make([]OffsetCommitRespPartition, dec.DecodeArrayLen())
for pi := range t.Partitions {
var p = &t.Partitions[pi]
p.ID = dec.DecodeInt32()
p.Err = errFromNo(dec.DecodeInt16())
}
}
if err := dec.Err(); err != nil {
return nil, err
}
return &resp, nil
}
func (r *OffsetCommitResp) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(r.CorrelationID)
enc.EncodeArrayLen(len(r.Topics))
for _, t := range r.Topics {
enc.Encode(t.Name)
enc.EncodeArrayLen(len(t.Partitions))
for _, p := range t.Partitions {
enc.Encode(p.ID)
enc.EncodeError(p.Err)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
type OffsetFetchReq struct {
CorrelationID int32
ClientID string
ConsumerGroup string
Topics []OffsetFetchReqTopic
}
type OffsetFetchReqTopic struct {
Name string
Partitions []int32
}
func ReadOffsetFetchReq(r io.Reader) (*OffsetFetchReq, error) {
var req OffsetFetchReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key + api version
_ = dec.DecodeInt32()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
req.ConsumerGroup = dec.DecodeString()
req.Topics = make([]OffsetFetchReqTopic, dec.DecodeArrayLen())
for ti := range req.Topics {
var topic = &req.Topics[ti]
topic.Name = dec.DecodeString()
topic.Partitions = make([]int32, dec.DecodeArrayLen())
for pi := range topic.Partitions {
topic.Partitions[pi] = dec.DecodeInt32()
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *OffsetFetchReq) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(int16(OffsetFetchReqKind))
enc.Encode(int16(1)) // version - must be 1 to use Kafka committed offsets instead of ZK
enc.Encode(r.CorrelationID)
enc.Encode(r.ClientID)
enc.Encode(r.ConsumerGroup)
enc.EncodeArrayLen(len(r.Topics))
for _, t := range r.Topics {
enc.Encode(t.Name)
enc.EncodeArrayLen(len(t.Partitions))
for _, p := range t.Partitions {
enc.Encode(p)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func (r *OffsetFetchReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type OffsetFetchResp struct {
CorrelationID int32
Topics []OffsetFetchRespTopic
}
type OffsetFetchRespTopic struct {
Name string
Partitions []OffsetFetchRespPartition
}
type OffsetFetchRespPartition struct {
ID int32
Offset int64
Metadata string
Err error
}
func ReadOffsetFetchResp(r io.Reader) (*OffsetFetchResp, error) {
var resp OffsetFetchResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Topics = make([]OffsetFetchRespTopic, dec.DecodeArrayLen())
for ti := range resp.Topics {
var t = &resp.Topics[ti]
t.Name = dec.DecodeString()
t.Partitions = make([]OffsetFetchRespPartition, dec.DecodeArrayLen())
for pi := range t.Partitions {
var p = &t.Partitions[pi]
p.ID = dec.DecodeInt32()
p.Offset = dec.DecodeInt64()
p.Metadata = dec.DecodeString()
p.Err = errFromNo(dec.DecodeInt16())
}
}
if err := dec.Err(); err != nil {
return nil, err
}
return &resp, nil
}
func (r *OffsetFetchResp) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(r.CorrelationID)
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.Encode(part.Offset)
enc.Encode(part.Metadata)
enc.EncodeError(part.Err)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
type ProduceReq struct {
CorrelationID int32
ClientID string
Compression Compression // only used when sending ProduceReqs
RequiredAcks int16
Timeout time.Duration
Topics []ProduceReqTopic
}
type ProduceReqTopic struct {
Name string
Partitions []ProduceReqPartition
}
type ProduceReqPartition struct {
ID int32
Messages []*Message
}
func ReadProduceReq(r io.Reader) (*ProduceReq, error) {
var req ProduceReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key + api version
_ = dec.DecodeInt32()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
req.RequiredAcks = dec.DecodeInt16()
req.Timeout = time.Duration(dec.DecodeInt32()) * time.Millisecond
req.Topics = make([]ProduceReqTopic, dec.DecodeArrayLen())
for ti := range req.Topics {
var topic = &req.Topics[ti]
topic.Name = dec.DecodeString()
topic.Partitions = make([]ProduceReqPartition, dec.DecodeArrayLen())
for pi := range topic.Partitions {
var part = &topic.Partitions[pi]
part.ID = dec.DecodeInt32()
if dec.Err() != nil {
return nil, dec.Err()
}
msgSetSize := dec.DecodeInt32()
if dec.Err() != nil {
return nil, dec.Err()
}
var err error
if part.Messages, err = readMessageSet(r, msgSetSize); err != nil {
return nil, err
}
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *ProduceReq) Bytes() ([]byte, error) {
var buf buffer
enc := NewEncoder(&buf)
enc.EncodeInt32(0) // placeholder
enc.EncodeInt16(ProduceReqKind)
enc.EncodeInt16(0)
enc.EncodeInt32(r.CorrelationID)
enc.EncodeString(r.ClientID)
enc.EncodeInt16(r.RequiredAcks)
enc.EncodeInt32(int32(r.Timeout / time.Millisecond))
enc.EncodeArrayLen(len(r.Topics))
for _, t := range r.Topics {
enc.EncodeString(t.Name)
enc.EncodeArrayLen(len(t.Partitions))
for _, p := range t.Partitions {
enc.EncodeInt32(p.ID)
i := len(buf)
enc.EncodeInt32(0) // placeholder
n, err := writeMessageSet(&buf, p.Messages, r.Compression)
if err != nil {
return nil, err
}
binary.BigEndian.PutUint32(buf[i:i+4], uint32(n))
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
binary.BigEndian.PutUint32(buf[0:4], uint32(len(buf)-4))
return []byte(buf), nil
}
func (r *ProduceReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type ProduceResp struct {
CorrelationID int32
Topics []ProduceRespTopic
}
type ProduceRespTopic struct {
Name string
Partitions []ProduceRespPartition
}
type ProduceRespPartition struct {
ID int32
Err error
Offset int64
}
func (r *ProduceResp) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(r.CorrelationID)
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.EncodeError(part.Err)
enc.Encode(part.Offset)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func ReadProduceResp(r io.Reader) (*ProduceResp, error) {
var resp ProduceResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Topics = make([]ProduceRespTopic, dec.DecodeArrayLen())
for ti := range resp.Topics {
var t = &resp.Topics[ti]
t.Name = dec.DecodeString()
t.Partitions = make([]ProduceRespPartition, dec.DecodeArrayLen())
for pi := range t.Partitions {
var p = &t.Partitions[pi]
p.ID = dec.DecodeInt32()
p.Err = errFromNo(dec.DecodeInt16())
p.Offset = dec.DecodeInt64()
}
}
if err := dec.Err(); err != nil {
return nil, err
}
return &resp, nil
}
type OffsetReq struct {
CorrelationID int32
ClientID string
ReplicaID int32
Topics []OffsetReqTopic
}
type OffsetReqTopic struct {
Name string
Partitions []OffsetReqPartition
}
type OffsetReqPartition struct {
ID int32
TimeMs int64 // cannot be time.Time because of negative values
MaxOffsets int32
}
func ReadOffsetReq(r io.Reader) (*OffsetReq, error) {
var req OffsetReq
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
// api key + api version
_ = dec.DecodeInt32()
req.CorrelationID = dec.DecodeInt32()
req.ClientID = dec.DecodeString()
req.ReplicaID = dec.DecodeInt32()
req.Topics = make([]OffsetReqTopic, dec.DecodeArrayLen())
for ti := range req.Topics {
var topic = &req.Topics[ti]
topic.Name = dec.DecodeString()
topic.Partitions = make([]OffsetReqPartition, dec.DecodeArrayLen())
for pi := range topic.Partitions {
var part = &topic.Partitions[pi]
part.ID = dec.DecodeInt32()
part.TimeMs = dec.DecodeInt64()
part.MaxOffsets = dec.DecodeInt32()
}
}
if dec.Err() != nil {
return nil, dec.Err()
}
return &req, nil
}
func (r *OffsetReq) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(int16(OffsetReqKind))
enc.Encode(int16(0))
enc.Encode(r.CorrelationID)
enc.Encode(r.ClientID)
enc.Encode(r.ReplicaID)
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.Encode(part.TimeMs)
enc.Encode(part.MaxOffsets)
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
func (r *OffsetReq) WriteTo(w io.Writer) (int64, error) {
b, err := r.Bytes()
if err != nil {
return 0, err
}
n, err := w.Write(b)
return int64(n), err
}
type OffsetResp struct {
CorrelationID int32
Topics []OffsetRespTopic
}
type OffsetRespTopic struct {
Name string
Partitions []OffsetRespPartition
}
type OffsetRespPartition struct {
ID int32
Err error
Offsets []int64
}
func ReadOffsetResp(r io.Reader) (*OffsetResp, error) {
var resp OffsetResp
dec := NewDecoder(r)
// total message size
_ = dec.DecodeInt32()
resp.CorrelationID = dec.DecodeInt32()
resp.Topics = make([]OffsetRespTopic, dec.DecodeArrayLen())
for ti := range resp.Topics {
var t = &resp.Topics[ti]
t.Name = dec.DecodeString()
t.Partitions = make([]OffsetRespPartition, dec.DecodeArrayLen())
for pi := range t.Partitions {
var p = &t.Partitions[pi]
p.ID = dec.DecodeInt32()
p.Err = errFromNo(dec.DecodeInt16())
p.Offsets = make([]int64, dec.DecodeArrayLen())
for oi := range p.Offsets {
p.Offsets[oi] = dec.DecodeInt64()
}
}
}
if err := dec.Err(); err != nil {
return nil, err
}
return &resp, nil
}
func (r *OffsetResp) Bytes() ([]byte, error) {
var buf bytes.Buffer
enc := NewEncoder(&buf)
// message size - for now just placeholder
enc.Encode(int32(0))
enc.Encode(r.CorrelationID)
enc.EncodeArrayLen(len(r.Topics))
for _, topic := range r.Topics {
enc.Encode(topic.Name)
enc.EncodeArrayLen(len(topic.Partitions))
for _, part := range topic.Partitions {
enc.Encode(part.ID)
enc.EncodeError(part.Err)
enc.EncodeArrayLen(len(part.Offsets))
for _, off := range part.Offsets {
enc.Encode(off)
}
}
}
if enc.Err() != nil {
return nil, enc.Err()
}
// update the message size information
b := buf.Bytes()
binary.BigEndian.PutUint32(b, uint32(len(b)-4))
return b, nil
}
type buffer []byte
func (b *buffer) Write(p []byte) (int, error) {
*b = append(*b, p...)
return len(p), nil
}
| {
"content_hash": "b71d4f0743bade30a6fc98712d9585fa",
"timestamp": "",
"source": "github",
"line_count": 1483,
"max_line_length": 116,
"avg_line_length": 23.54686446392448,
"alnum_prop": 0.6775200458190149,
"repo_name": "zorkian/kafka",
"id": "1d68e94dd622612044c37f064f506a925a7ffa33",
"size": "34920",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "proto/messages.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "297675"
},
{
"name": "Shell",
"bytes": "2493"
}
],
"symlink_target": ""
} |
package net.minder.config;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
public class MapMethodSampleTest {
public static class Target {
private int limit = 3;
@Configure
public void setRetryLimit( int value ) {
limit = value;
}
}
static Map<String,String> config = new HashMap<String,String>();
static { config.put( "retryLimit", "5" ); }
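  // The injector matches the "retryLimit" key to the @Configure-annotated
  // setRetryLimit method and converts the string value "5" to an int,
  // which is what the assertion below verifies.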
@Test
public void sample() {
Target target = new Target();
ConfigurationInjectorBuilder.configuration().target( target ).source( config ).inject();
assertThat( target.limit, is( 5 ) );
}
}
| {
"content_hash": "5a6fbf4948a474183a34191de97f8f5b",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 92,
"avg_line_length": 21.484848484848484,
"alnum_prop": 0.692524682651622,
"repo_name": "kminder/config-injector",
"id": "67096ffdec9e3325bf726a15600f1dc283c7a2be",
"size": "1515",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/net/minder/config/MapMethodSampleTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "65663"
}
],
"symlink_target": ""
} |
import hbs from 'htmlbars-inline-precompile';
import sinon from 'sinon';
import wait from 'ember-test-helpers/wait';
import {beforeEach, describe, it} from 'mocha';
import {expect} from 'chai';
import {run} from '@ember/runloop';
import {setupComponentTest} from 'ember-mocha';
describe('Integration: Component: gh-timezone-select', function () {
setupComponentTest('gh-timezone-select', {
integration: true
});
beforeEach(function () {
this.set('availableTimezones', [
{name: 'Pacific/Pago_Pago', label: '(GMT -11:00) Midway Island, Samoa'},
{name: 'Etc/UTC', label: '(GMT) UTC'},
{name: 'Pacific/Kwajalein', label: '(GMT +12:00) International Date Line West'}
]);
this.set('activeTimezone', 'Etc/UTC');
});
it('renders', function () {
this.render(hbs`{{gh-timezone-select
availableTimezones=availableTimezones
activeTimezone=activeTimezone}}`);
expect(this.$(), 'top-level elements').to.have.length(1);
expect(this.$('option'), 'number of options').to.have.length(3);
expect(this.$('select').val(), 'selected option value').to.equal('Etc/UTC');
});
it('handles an unknown timezone', function () {
this.set('activeTimezone', 'Europe/London');
this.render(hbs`{{gh-timezone-select
availableTimezones=availableTimezones
activeTimezone=activeTimezone}}`);
// we have an additional blank option at the top
expect(this.$('option'), 'number of options').to.have.length(4);
// blank option is selected
expect(this.$('select').val(), 'selected option value').to.equal('');
// we indicate the manual override
expect(this.$('p').text()).to.match(/Your timezone has been automatically set to Europe\/London/);
});
it('triggers update action on change', function (done) {
let update = sinon.spy();
this.set('update', update);
this.render(hbs`{{gh-timezone-select
availableTimezones=availableTimezones
activeTimezone=activeTimezone
update=(action update)}}`);
run(() => {
this.$('select').val('Pacific/Pago_Pago').change();
});
wait().then(() => {
expect(update.calledOnce, 'update was called once').to.be.true;
expect(update.firstCall.args[0].name, 'update was passed new timezone')
.to.equal('Pacific/Pago_Pago');
done();
});
});
// TODO: mock clock service, fake the time, test we have the correct
// local time and it changes alongside selection changes
it('renders local time');
});
| {
"content_hash": "77f6caa72d1fa209fdcba5f8444c70bd",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 106,
"avg_line_length": 37.44444444444444,
"alnum_prop": 0.6027448071216617,
"repo_name": "acburdine/Ghost-Admin",
"id": "2c5b2f1d6661f6523062916ad25a9c6a628a0fe4",
"size": "2696",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/integration/components/gh-timezone-select-test.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "517541"
},
{
"name": "HTML",
"bytes": "307288"
},
{
"name": "JavaScript",
"bytes": "1261589"
}
],
"symlink_target": ""
} |
package com.google.cloudsearch.tutorial;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.services.cloudsearch.v1.CloudSearch;
import com.google.api.services.cloudsearch.v1.model.Operation;
import com.google.api.services.cloudsearch.v1.model.Schema;
import com.google.api.services.cloudsearch.v1.model.Status;
import com.google.api.services.cloudsearch.v1.model.UpdateSchemaRequest;
import com.google.enterprise.cloudsearch.sdk.CredentialFactory;
import com.google.enterprise.cloudsearch.sdk.LocalFileCredentialFactory;
import com.google.enterprise.cloudsearch.sdk.config.ConfigValue;
import com.google.enterprise.cloudsearch.sdk.config.Configuration;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Collections;
/**
* Utility for updating the schema for a data source. Uses
* the Connector SDK for configuration.
*
* <p>Sample properties file:
*
* <pre>
* # Required properties for accessing data source
* # (These values are created by the admin before running the connector)
* api.sourceId=1234567890abcdef
*
* # Path to service account credentials
* api.serviceAccountPrivateKeyFile=./PrivateKey.json
*
* # Path to the schema file, used by the SchemaTool utility
* github.schema=schema.json
* </pre>
*/
public class SchemaTool {
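  /** Delay, in milliseconds, between polls of the long-running schema update operation. */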
public static final int OPERATION_POLL_INTERVAL = 3 * 1000;
/**
* Main entry point for the schema tool.
*/
public static void main(String[] argv) throws Exception {
Configuration.initConfig(argv);
ConfigValue<String> sourceId = Configuration.getString("api.sourceId", null);
ConfigValue<String> localSchema = Configuration.getString("github.schema", null);
if (sourceId.get() == null) {
throw new IllegalArgumentException("Missing api.sourceId value in configuration");
}
if (localSchema.get() == null) {
throw new IllegalArgumentException("Missing github.schema value in configuration");
}
updateSchema(sourceId.get(), localSchema.get());
}
/**
* Builds the CloudSearch service using the credentials as configured in the SDK.
*
* @return CloudSearch instance
* @throws Exception if unable to read credentials
*/
static CloudSearch buildAuthorizedClient() throws Exception {
CredentialFactory credentialFactory = LocalFileCredentialFactory.fromConfiguration();
GoogleCredential credential = credentialFactory.getCredential(
Collections.singletonList("https://www.googleapis.com/auth/cloud_search"));
// Build the cloud search client
return new CloudSearch.Builder(
Utils.getDefaultTransport(),
Utils.getDefaultJsonFactory(),
credential)
.setApplicationName("Sample connector for GitHub")
.build();
}
/**
* Updates the schema for a datasource.
*
* @param dataSourceId Unique ID of the datasource.
* @param schemaFilePath path to JSON file containing the schema
*/
// [START cloud_search_github_tutorial_update_schema]
static void updateSchema(String dataSourceId, String schemaFilePath) throws Exception {
CloudSearch cloudSearch = buildAuthorizedClient();
Schema schema;
try (BufferedReader br = new BufferedReader(new FileReader(schemaFilePath))) {
schema = cloudSearch.getObjectParser().parseAndClose(br, Schema.class);
}
UpdateSchemaRequest updateSchemaRequest = new UpdateSchemaRequest()
.setSchema(schema);
String resourceName = String.format("datasources/%s", dataSourceId);
Operation operation = cloudSearch.indexing().datasources()
.updateSchema(resourceName, updateSchemaRequest)
.execute();
// Wait for the operation to complete.
while (operation.getDone() == null || operation.getDone() == false) {
// Wait before polling again
Thread.sleep(OPERATION_POLL_INTERVAL);
System.out.printf("Fetching operation: %s\n", operation.getName());
operation = cloudSearch.operations().get(operation.getName()).execute();
}
// Operation is complete, check result
Status error = operation.getError();
if (error != null) {
System.err.printf("Error updating schema: %s\n", error.getMessage());
} else {
System.out.println("Schema updated.");
}
}
// [END cloud_search_github_tutorial_update_schema]
}
| {
"content_hash": "91041d10daf73fd9d822fc4bbb30040a",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 89,
"avg_line_length": 36.78151260504202,
"alnum_prop": 0.7308658898789125,
"repo_name": "gsuitedevs/cloud-search-samples",
"id": "c330e7789fac3bbb030bfea9e23746d28089cec1",
"size": "4971",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "end-to-end/connector/src/main/java/com/google/cloudsearch/tutorial/SchemaTool.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "9675"
},
{
"name": "HTML",
"bytes": "20100"
},
{
"name": "Java",
"bytes": "174146"
},
{
"name": "JavaScript",
"bytes": "33354"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>distributed-reference-counting: Not compatible</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.7.2 / distributed-reference-counting - 8.5.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
distributed-reference-counting
<small>
8.5.0
<span class="label label-info">Not compatible</span>
</small>
</h1>
<p><em><script>document.write(moment("2020-08-29 10:22:14 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2020-08-29 10:22:14 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.12 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-m4 1 Virtual package relying on m4
coq 8.7.2 Formal proof management system.
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.05.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.05.0 Official 4.05.0 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.8.1 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "[email protected]"
homepage: "https://github.com/coq-contribs/distributed-reference-counting"
license: "LGPL 2"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/DistributedReferenceCounting"]
depends: [
"ocaml"
"coq" {>= "8.5" & < "8.6~"}
]
tags: [ "keyword:garbage collection" "keyword:distributed algorithms" "category:Computer Science/Semantics and Compilation/Compilation" "category:Computer Science/Concurrent Systems and Protocols/Correctness of specific protocols" ]
authors: [ "Luc Moreau <>" "Jean Duprat <>" ]
bug-reports: "https://github.com/coq-contribs/distributed-reference-counting/issues"
dev-repo:
"git+https://github.com/coq-contribs/distributed-reference-counting.git"
synopsis: "A Construction of Distributed Reference Counting"
description: """
This library contains the constructive proof of correctness of
several variants of a distributed reference counting algorithm."""
flags: light-uninstall
url {
src:
"https://github.com/coq-contribs/distributed-reference-counting/archive/v8.5.0.tar.gz"
checksum: "md5=2b83383f74374a07e130f7a99342377b"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-distributed-reference-counting.8.5.0 coq.8.7.2</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.7.2).
The following dependencies couldn't be met:
- coq-distributed-reference-counting -> coq < 8.6~ -> ocaml < 4.03.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-distributed-reference-counting.8.5.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
<small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small>
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "db51590862f256f2e512a0a44151a0ee",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 272,
"avg_line_length": 43.905325443786985,
"alnum_prop": 0.5628032345013477,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "a144cd594b89efedafbd7a43041aaadc140f13d9",
"size": "7422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.05.0-2.0.6/released/8.7.2/distributed-reference-counting/8.5.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package com.example.alan.thunderweather.db;
import org.litepal.crud.DataSupport;
/**
* Created by Alan on 2017/8/21.
*/
public class City extends DataSupport{
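    // LitePal ORM model: each field below is persisted as a table column
    // (LitePal derives the table from the class name by convention).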
private int id;
private String cityName;
private int cityCode;
private int provinceId;
public int getCityCode() {
return cityCode;
}
public void setCityCode(int cityCode) {
this.cityCode = cityCode;
}
public String getCityName() {
return cityName;
}
public void setCityName(String cityName) {
this.cityName = cityName;
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getProvinceId() {
return provinceId;
}
public void setProvinceId(int provinceId) {
this.provinceId = provinceId;
}
}
| {
"content_hash": "370d81682e526625d382e4957cc24e9d",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 47,
"avg_line_length": 17.26530612244898,
"alnum_prop": 0.6146572104018913,
"repo_name": "CoderAlan/ThunderWeather",
"id": "8aa20a5f0f37e9cc9959a1390aebf4834e5432a1",
"size": "846",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/example/alan/thunderweather/db/City.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "34198"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import subprocess
from simlammps import read_data_file
from simlammps.cuba_extension import CUBAExtension
from simphony.core.cuba import CUBA
lammps_script = """# example of creating lammps data file (to be then used by SimPhoNy"
dimension 2
atom_style atomic
# create geometry
lattice hex 0.7
region box block 0 20 0 10 -0.25 0.25
create_box 3 box
create_atoms 1 box
mass 1 1.0
mass 2 1.0
mass 3 1.0
# LJ potentials
pair_style lj/cut 1.12246
pair_coeff * * 1.0 1.0 1.12246
# define groups
region 1 block INF INF INF 1.25 INF INF
group lower region 1
region 2 block INF INF 8.75 INF INF INF
group upper region 2
group boundary union lower upper
group flow subtract all boundary
set group lower type 2
set group upper type 3
# initial velocities
compute mobile flow temp
velocity flow create 1.0 482748 temp mobile
velocity boundary set 0.0 0.0 0.0
# write atoms to a lammps data file
write_data example.data"""
with open("lammps_example_script", "w") as script_file:
script_file.write(lammps_script)
subprocess.check_call("lammps < lammps_example_script", shell=True)
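# Convert the LAMMPS data file written above into SimPhoNy data containers.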
particles, state_data = read_data_file("example.data")
print("\n\nFinished converting files")
print("\nA Particles data-set was read from the file:")
print(" '{}' has {} particles".format(
particles.name,
particles.count_of(CUBA.PARTICLE)))
number_particles = sum(1 for _ in particles.iter(item_type=CUBA.PARTICLE))
number_materials = sum(1 for _ in state_data.iter_materials())
print("\n{} materials were read from the file.\n".format(number_materials))
box_description = \
""\
"The data-set has the following simulation box description:\n"\
" CUBAExtension.BOX_ORIGIN: {}\n" \
" CUBAExtension.BOX_VECTORS: {}"
print(box_description.format(
particles.data_extension[CUBAExtension.BOX_ORIGIN],
particles.data_extension[CUBAExtension.BOX_VECTORS]))
| {
"content_hash": "e960d24c4955ba3b89273ed52518b381",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 87,
"avg_line_length": 27.135135135135137,
"alnum_prop": 0.7166334661354582,
"repo_name": "simphony/simphony-lammps-md",
"id": "24b2435717e16e2b744d392ca919ab596eff6357",
"size": "2008",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/file_conversion/convert.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "155311"
},
{
"name": "Shell",
"bytes": "1045"
}
],
"symlink_target": ""
} |
#include <apr_pools.h>
#include <apr_hash.h>
#include <apr_tables.h>
#include <string.h> /* for strncmp() */
#include "svn_hash.h"
#include "svn_string.h"
#include "svn_props.h"
#include "svn_error.h"
#include "svn_ctype.h"
#include "private/svn_subr_private.h"
/* All Subversion-specific versioned node properties
* known to this client, that are applicable to both a file and a dir.
*/
#define SVN_PROP__NODE_COMMON_PROPS SVN_PROP_MERGEINFO, \
SVN_PROP_TEXT_TIME, \
SVN_PROP_OWNER, \
SVN_PROP_GROUP, \
SVN_PROP_UNIX_MODE,
/* All Subversion-specific versioned node properties
* known to this client, that are applicable to a dir only.
*/
#define SVN_PROP__NODE_DIR_ONLY_PROPS SVN_PROP_IGNORE, \
SVN_PROP_INHERITABLE_IGNORES, \
SVN_PROP_INHERITABLE_AUTO_PROPS, \
SVN_PROP_EXTERNALS,
/* All Subversion-specific versioned node properties
* known to this client, that are applicable to a file only.
*/
#define SVN_PROP__NODE_FILE_ONLY_PROPS SVN_PROP_MIME_TYPE, \
SVN_PROP_EOL_STYLE, \
SVN_PROP_KEYWORDS, \
SVN_PROP_EXECUTABLE, \
SVN_PROP_NEEDS_LOCK, \
SVN_PROP_SPECIAL,
static const char *const known_rev_props[]
= { SVN_PROP_REVISION_ALL_PROPS
NULL };
static const char *const known_node_props[]
= { SVN_PROP__NODE_COMMON_PROPS
SVN_PROP__NODE_DIR_ONLY_PROPS
SVN_PROP__NODE_FILE_ONLY_PROPS
NULL };
static const char *const known_dir_props[]
= { SVN_PROP__NODE_COMMON_PROPS
SVN_PROP__NODE_DIR_ONLY_PROPS
NULL };
static const char *const known_file_props[]
= { SVN_PROP__NODE_COMMON_PROPS
SVN_PROP__NODE_FILE_ONLY_PROPS
NULL };
static svn_boolean_t
is_known_prop(const char *prop_name,
const char *const *known_props)
{
while (*known_props)
{
if (strcmp(prop_name, *known_props++) == 0)
return TRUE;
}
return FALSE;
}
svn_boolean_t
svn_prop_is_known_svn_rev_prop(const char *prop_name)
{
return is_known_prop(prop_name, known_rev_props);
}
svn_boolean_t
svn_prop_is_known_svn_node_prop(const char *prop_name)
{
return is_known_prop(prop_name, known_node_props);
}
svn_boolean_t
svn_prop_is_known_svn_file_prop(const char *prop_name)
{
return is_known_prop(prop_name, known_file_props);
}
svn_boolean_t
svn_prop_is_known_svn_dir_prop(const char *prop_name)
{
return is_known_prop(prop_name, known_dir_props);
}
svn_boolean_t
svn_prop_is_svn_prop(const char *prop_name)
{
return strncmp(prop_name, SVN_PROP_PREFIX, (sizeof(SVN_PROP_PREFIX) - 1))
== 0;
}
svn_boolean_t
svn_prop_has_svn_prop(const apr_hash_t *props, apr_pool_t *pool)
{
apr_hash_index_t *hi;
const void *prop_name;
if (! props)
return FALSE;
for (hi = apr_hash_first(pool, (apr_hash_t *)props); hi;
hi = apr_hash_next(hi))
{
apr_hash_this(hi, &prop_name, NULL, NULL);
if (svn_prop_is_svn_prop((const char *) prop_name))
return TRUE;
}
return FALSE;
}
#define SIZEOF_WC_PREFIX (sizeof(SVN_PROP_WC_PREFIX) - 1)
#define SIZEOF_ENTRY_PREFIX (sizeof(SVN_PROP_ENTRY_PREFIX) - 1)
svn_prop_kind_t
svn_property_kind2(const char *prop_name)
{
if (strncmp(prop_name, SVN_PROP_WC_PREFIX, SIZEOF_WC_PREFIX) == 0)
return svn_prop_wc_kind;
if (strncmp(prop_name, SVN_PROP_ENTRY_PREFIX, SIZEOF_ENTRY_PREFIX) == 0)
return svn_prop_entry_kind;
return svn_prop_regular_kind;
}
/* NOTE: this function is deprecated, but we cannot move it to deprecated.c
because we need the SIZEOF_*_PREFIX constant symbols defined above. */
svn_prop_kind_t
svn_property_kind(int *prefix_len,
const char *prop_name)
{
svn_prop_kind_t kind = svn_property_kind2(prop_name);
if (prefix_len)
{
if (kind == svn_prop_wc_kind)
*prefix_len = SIZEOF_WC_PREFIX;
else if (kind == svn_prop_entry_kind)
*prefix_len = SIZEOF_ENTRY_PREFIX;
else
*prefix_len = 0;
}
return kind;
}
svn_error_t *
svn_categorize_props(const apr_array_header_t *proplist,
apr_array_header_t **entry_props,
apr_array_header_t **wc_props,
apr_array_header_t **regular_props,
apr_pool_t *pool)
{
int i;
if (entry_props)
*entry_props = apr_array_make(pool, 1, sizeof(svn_prop_t));
if (wc_props)
*wc_props = apr_array_make(pool, 1, sizeof(svn_prop_t));
if (regular_props)
*regular_props = apr_array_make(pool, 1, sizeof(svn_prop_t));
for (i = 0; i < proplist->nelts; i++)
{
svn_prop_t *prop, *newprop;
enum svn_prop_kind kind;
prop = &APR_ARRAY_IDX(proplist, i, svn_prop_t);
kind = svn_property_kind2(prop->name);
newprop = NULL;
if (kind == svn_prop_regular_kind)
{
if (regular_props)
newprop = apr_array_push(*regular_props);
}
else if (kind == svn_prop_wc_kind)
{
if (wc_props)
newprop = apr_array_push(*wc_props);
}
else if (kind == svn_prop_entry_kind)
{
if (entry_props)
newprop = apr_array_push(*entry_props);
}
else
/* Technically this can't happen, but might as well have the
code ready in case that ever changes. */
return svn_error_createf(SVN_ERR_BAD_PROP_KIND, NULL,
"Bad property kind for property '%s'",
prop->name);
if (newprop)
{
newprop->name = prop->name;
newprop->value = prop->value;
}
}
return SVN_NO_ERROR;
}
svn_error_t *
svn_prop_diffs(apr_array_header_t **propdiffs,
const apr_hash_t *target_props,
const apr_hash_t *source_props,
apr_pool_t *pool)
{
apr_hash_index_t *hi;
apr_array_header_t *ary = apr_array_make(pool, 1, sizeof(svn_prop_t));
/* Note: we will be storing the pointers to the keys (from the hashes)
into the propdiffs array. It is acceptable for us to
reference the same memory as the base/target_props hash. */
/* Loop over SOURCE_PROPS and examine each key. This will allow us to
detect any `deletion' events or `set-modification' events. */
for (hi = apr_hash_first(pool, (apr_hash_t *)source_props); hi;
hi = apr_hash_next(hi))
{
const void *key;
apr_ssize_t klen;
void *val;
const svn_string_t *propval1, *propval2;
/* Get next property */
apr_hash_this(hi, &key, &klen, &val);
propval1 = val;
/* Does property name exist in TARGET_PROPS? */
propval2 = apr_hash_get((apr_hash_t *)target_props, key, klen);
if (propval2 == NULL)
{
/* Add a delete event to the array */
svn_prop_t *p = apr_array_push(ary);
p->name = key;
p->value = NULL;
}
else if (! svn_string_compare(propval1, propval2))
{
/* Add a set (modification) event to the array */
svn_prop_t *p = apr_array_push(ary);
p->name = key;
p->value = svn_string_dup(propval2, pool);
}
}
/* Loop over TARGET_PROPS and examine each key. This allows us to
detect `set-creation' events */
for (hi = apr_hash_first(pool, (apr_hash_t *)target_props); hi;
hi = apr_hash_next(hi))
{
const void *key;
apr_ssize_t klen;
void *val;
const svn_string_t *propval;
/* Get next property */
apr_hash_this(hi, &key, &klen, &val);
propval = val;
/* Does property name exist in SOURCE_PROPS? */
if (NULL == apr_hash_get((apr_hash_t *)source_props, key, klen))
{
/* Add a set (creation) event to the array */
svn_prop_t *p = apr_array_push(ary);
p->name = key;
p->value = svn_string_dup(propval, pool);
}
}
/* Done building our array of user events. */
*propdiffs = ary;
return SVN_NO_ERROR;
}
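/* Apply PROP_CHANGES to a copy of ORIGINAL_PROPS and return the result.
   A change whose value is NULL removes the property from the hash. */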
apr_hash_t *
svn_prop__patch(const apr_hash_t *original_props,
const apr_array_header_t *prop_changes,
apr_pool_t *pool)
{
apr_hash_t *props = apr_hash_copy(pool, original_props);
int i;
for (i = 0; i < prop_changes->nelts; i++)
{
const svn_prop_t *p = &APR_ARRAY_IDX(prop_changes, i, svn_prop_t);
svn_hash_sets(props, p->name, p->value);
}
return props;
}
/**
* Reallocate the members of PROP using POOL.
*/
static void
svn_prop__members_dup(svn_prop_t *prop, apr_pool_t *pool)
{
if (prop->name)
prop->name = apr_pstrdup(pool, prop->name);
if (prop->value)
prop->value = svn_string_dup(prop->value, pool);
}
svn_prop_t *
svn_prop_dup(const svn_prop_t *prop, apr_pool_t *pool)
{
svn_prop_t *new_prop = apr_palloc(pool, sizeof(*new_prop));
*new_prop = *prop;
svn_prop__members_dup(new_prop, pool);
return new_prop;
}
apr_array_header_t *
svn_prop_array_dup(const apr_array_header_t *array, apr_pool_t *pool)
{
int i;
apr_array_header_t *new_array = apr_array_copy(pool, array);
for (i = 0; i < new_array->nelts; ++i)
{
svn_prop_t *elt = &APR_ARRAY_IDX(new_array, i, svn_prop_t);
svn_prop__members_dup(elt, pool);
}
return new_array;
}
apr_array_header_t *
svn_prop_hash_to_array(const apr_hash_t *hash,
apr_pool_t *pool)
{
apr_hash_index_t *hi;
apr_array_header_t *array = apr_array_make(pool,
apr_hash_count((apr_hash_t *)hash),
sizeof(svn_prop_t));
for (hi = apr_hash_first(pool, (apr_hash_t *)hash); hi;
hi = apr_hash_next(hi))
{
const void *key;
void *val;
svn_prop_t prop;
apr_hash_this(hi, &key, NULL, &val);
prop.name = key;
prop.value = val;
APR_ARRAY_PUSH(array, svn_prop_t) = prop;
}
return array;
}
apr_hash_t *
svn_prop_hash_dup(const apr_hash_t *hash,
apr_pool_t *pool)
{
apr_hash_index_t *hi;
apr_hash_t *new_hash = apr_hash_make(pool);
for (hi = apr_hash_first(pool, (apr_hash_t *)hash); hi;
hi = apr_hash_next(hi))
{
const void *key;
apr_ssize_t klen;
void *prop;
apr_hash_this(hi, &key, &klen, &prop);
apr_hash_set(new_hash, apr_pstrmemdup(pool, key, klen), klen,
svn_string_dup(prop, pool));
}
return new_hash;
}
apr_hash_t *
svn_prop_array_to_hash(const apr_array_header_t *properties,
apr_pool_t *pool)
{
int i;
apr_hash_t *prop_hash = apr_hash_make(pool);
for (i = 0; i < properties->nelts; i++)
{
const svn_prop_t *prop = &APR_ARRAY_IDX(properties, i, svn_prop_t);
svn_hash_sets(prop_hash, prop->name, prop->value);
}
return prop_hash;
}
svn_boolean_t
svn_prop_is_boolean(const char *prop_name)
{
/* If we end up with more than 3 of these, we should probably put
them in a table and use bsearch. With only three, it doesn't
make any speed difference. */
if (strcmp(prop_name, SVN_PROP_EXECUTABLE) == 0
|| strcmp(prop_name, SVN_PROP_NEEDS_LOCK) == 0
|| strcmp(prop_name, SVN_PROP_SPECIAL) == 0)
return TRUE;
return FALSE;
}
svn_boolean_t
svn_prop_needs_translation(const char *propname)
{
/* ### Someday, we may want to be picky and choosy about which
properties require UTF8 and EOL conversion. For now, all "svn:"
props need it. */
return svn_prop_is_svn_prop(propname);
}
svn_boolean_t
svn_prop_name_is_valid(const char *prop_name)
{
const char *p = prop_name;
/* The characters we allow use identical representations in UTF8
and ASCII, so we can just test for the appropriate ASCII codes.
But we can't use standard C character notation ('A', 'B', etc)
because there's no guarantee that this C environment is using
ASCII. */
if (!(svn_ctype_isalpha(*p)
|| *p == SVN_CTYPE_ASCII_COLON
|| *p == SVN_CTYPE_ASCII_UNDERSCORE))
return FALSE;
p++;
for (; *p; p++)
{
if (!(svn_ctype_isalnum(*p)
|| *p == SVN_CTYPE_ASCII_MINUS
|| *p == SVN_CTYPE_ASCII_DOT
|| *p == SVN_CTYPE_ASCII_COLON
|| *p == SVN_CTYPE_ASCII_UNDERSCORE))
return FALSE;
}
return TRUE;
}
const char *
svn_prop_get_value(const apr_hash_t *props,
const char *prop_name)
{
svn_string_t *str;
if (!props)
return NULL;
str = svn_hash_gets((apr_hash_t *)props, prop_name);
if (str)
return str->data;
return NULL;
}
| {
"content_hash": "61d62fd1b05f47072c51e4ce15acd293",
"timestamp": "",
"source": "github",
"line_count": 487,
"max_line_length": 80,
"avg_line_length": 26.49486652977413,
"alnum_prop": 0.575680074401302,
"repo_name": "jrobhoward/SCADAbase",
"id": "738d00f16682ed833dae1f184affcb3cbe018657",
"size": "13956",
"binary": false,
"copies": "6",
"ref": "refs/heads/SCADAbase",
"path": "contrib/subversion/subversion/libsvn_subr/properties.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "62471"
},
{
"name": "Assembly",
"bytes": "4615704"
},
{
"name": "Awk",
"bytes": "273794"
},
{
"name": "Batchfile",
"bytes": "20333"
},
{
"name": "C",
"bytes": "457666547"
},
{
"name": "C++",
"bytes": "91495356"
},
{
"name": "CMake",
"bytes": "17632"
},
{
"name": "CSS",
"bytes": "104220"
},
{
"name": "ChucK",
"bytes": "39"
},
{
"name": "D",
"bytes": "6321"
},
{
"name": "DIGITAL Command Language",
"bytes": "10638"
},
{
"name": "DTrace",
"bytes": "1904158"
},
{
"name": "Emacs Lisp",
"bytes": "32010"
},
{
"name": "EmberScript",
"bytes": "286"
},
{
"name": "Forth",
"bytes": "204603"
},
{
"name": "GAP",
"bytes": "72078"
},
{
"name": "Groff",
"bytes": "32376243"
},
{
"name": "HTML",
"bytes": "5776268"
},
{
"name": "Haskell",
"bytes": "2458"
},
{
"name": "IGOR Pro",
"bytes": "6510"
},
{
"name": "Java",
"bytes": "112547"
},
{
"name": "KRL",
"bytes": "4950"
},
{
"name": "Lex",
"bytes": "425858"
},
{
"name": "Limbo",
"bytes": "4037"
},
{
"name": "Logos",
"bytes": "179088"
},
{
"name": "Makefile",
"bytes": "12750766"
},
{
"name": "Mathematica",
"bytes": "21782"
},
{
"name": "Max",
"bytes": "4105"
},
{
"name": "Module Management System",
"bytes": "816"
},
{
"name": "Objective-C",
"bytes": "1571960"
},
{
"name": "PHP",
"bytes": "2471"
},
{
"name": "PLSQL",
"bytes": "96552"
},
{
"name": "PLpgSQL",
"bytes": "2212"
},
{
"name": "Perl",
"bytes": "3947402"
},
{
"name": "Perl6",
"bytes": "122803"
},
{
"name": "PostScript",
"bytes": "152255"
},
{
"name": "Prolog",
"bytes": "42792"
},
{
"name": "Protocol Buffer",
"bytes": "54964"
},
{
"name": "Python",
"bytes": "381066"
},
{
"name": "R",
"bytes": "764"
},
{
"name": "Rebol",
"bytes": "738"
},
{
"name": "Ruby",
"bytes": "67015"
},
{
"name": "Scheme",
"bytes": "5087"
},
{
"name": "Scilab",
"bytes": "196"
},
{
"name": "Shell",
"bytes": "10963470"
},
{
"name": "SourcePawn",
"bytes": "2293"
},
{
"name": "SuperCollider",
"bytes": "80208"
},
{
"name": "Tcl",
"bytes": "7102"
},
{
"name": "TeX",
"bytes": "720582"
},
{
"name": "VimL",
"bytes": "19597"
},
{
"name": "XS",
"bytes": "17496"
},
{
"name": "XSLT",
"bytes": "4564"
},
{
"name": "Yacc",
"bytes": "1881915"
}
],
"symlink_target": ""
} |
<!-- BEGINNING OF PRE-COMMIT-BLUEPRINT DOCS HOOK:TITLE -->
# Cloud DNS Managed Zone Forwarding blueprint
<!-- END OF PRE-COMMIT-BLUEPRINT DOCS HOOK:TITLE -->
<!-- BEGINNING OF PRE-COMMIT-BLUEPRINT DOCS HOOK:BODY -->
A private Cloud DNS managed zone with forwarding config
## Setters
| Name | Value | Type | Count |
|---------------------------|----------------------|------|-------|
| domain | example.com. | str | 1 |
| forwarding-target-address | 192.168.0.1 | str | 1 |
| managed-zone-name | private-managed-zone | str | 1 |
| namespace | networking | str | 2 |
| network-name | example-network | str | 1 |
| project-id | project-id | str | 3 |
## Sub-packages
This package has no sub-packages.
## Resources
| File | APIVersion | Kind | Name | Namespace |
|---------------|--------------------------------------------|----------------|-----------------------|------------|
| dns.yaml | dns.cnrm.cloud.google.com/v1beta1 | DNSManagedZone | dnsmanagedzone-sample | networking |
| services.yaml | serviceusage.cnrm.cloud.google.com/v1beta1 | Service | project-id-dns | projects |
## Resource References
- [DNSManagedZone](https://cloud.google.com/config-connector/docs/reference/resource-docs/dns/dnsmanagedzone)
- [Service](https://cloud.google.com/config-connector/docs/reference/resource-docs/serviceusage/service)
## Usage
1. Clone the package:
```shell
kpt pkg get https://github.com/GoogleCloudPlatform/blueprints.git/catalog/networking/dns/managedzone-forwarding@${VERSION}
```
Replace `${VERSION}` with the desired repo branch or tag
(for example, `main`).
1. Move into the local package:
```shell
cd "./managedzone-forwarding/"
```
1. Edit the function config file(s):
- setters.yaml
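   For reference, a minimal `setters.yaml` sketch derived from the setter
   table above (the ConfigMap wrapper follows the usual kpt `apply-setters`
   convention and is assumed here, not copied from this package):

   ```yaml
   apiVersion: v1
   kind: ConfigMap
   metadata:
     name: setters
   data:
     domain: example.com.
     forwarding-target-address: 192.168.0.1
     managed-zone-name: private-managed-zone
     namespace: networking
     network-name: example-network
     project-id: project-id
   ```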
1. Execute the function pipeline
```shell
kpt fn render
```
1. Initialize the resource inventory
```shell
    kpt live init --namespace "${NAMESPACE}"
```
Replace `${NAMESPACE}` with the namespace in which to manage
the inventory ResourceGroup (for example, `config-control`).
1. Apply the package resources to your cluster
```shell
kpt live apply
```
1. Wait for the resources to be ready
```shell
kpt live status --output table --poll-until current
```
<!-- END OF PRE-COMMIT-BLUEPRINT DOCS HOOK:BODY -->
| {
"content_hash": "ba017a081efa431ad69e8429440d3585",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 126,
"avg_line_length": 34.54666666666667,
"alnum_prop": 0.5650328058664609,
"repo_name": "GoogleCloudPlatform/blueprints",
"id": "39c778f0eb0aa460fe22c9093cf2162352066de7",
"size": "2591",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "catalog/networking/dns/managedzone-forwarding/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "7461"
},
{
"name": "Makefile",
"bytes": "3042"
},
{
"name": "Shell",
"bytes": "21187"
}
],
"symlink_target": ""
} |
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"DTD/xhtml1-strict.dtd">
<html>
<head>
<title>API docs for “pymine.util.httpserve”</title>
<meta content="text/html;charset=utf-8" http-equiv="Content-Type" />
<link href="apidocs.css" type="text/css" rel="stylesheet" />
</head>
<body>
<h1 class="module">Module p.u.httpserve</h1>
<p>
<span id="part">Part of <a href="pymine.html">pymine</a>.<a href="pymine.util.html">util</a></span>
</p>
<div>
</div>
    <div><p>http://docs.djangoproject.com/en/dev/howto/static-files/ says:</p>
<p><QUOTE>Django itself doesn't serve static (media) files, such as
images, style sheets, or video. It leaves that job to whichever Web server
you choose. The reasoning here is that standard Web servers, such as
Apache, lighttpd and Cherokee, are much more fine-tuned at serving static
files than a Web application framework.</QUOTE></p>
<p>...which is fine, but which doesn't actually help us when Django is
being used to implement potentially hundreds of mini-websites with their
own novel forms of authentication and where you don't want management
overhead of keeping (documenting?) how to synchronise their authentication
needs with [INSERT NAME OF PREFERRED WEBSERVER DU JOUR].</p>
<p>See also: http://code.djangoproject.com/ticket/2131#comment:2</p>
<p><QUOTE>Django isn't meant to serve static files, so I'm marking
this as a wontfix.</QUOTE></p>
<p>So in the face of those wanting to nanny us into "proper
behaviour", regrettably we have to roll our own.</p>
<p>We are allowed to take the performance hit, because the point is to have
"one mine per user spattered all over the world" rather than
"bazillion mines all at one hosting company which subsequently suffers
performance issues".<table class="fieldTable"></table></div>
<div id="splitTables">
<table class="children sortable" id="id153">
<tr class="function">
<td>Function</td>
<td><a href="pymine.util.httpserve.html#cleanpath">cleanpath</a></td>
<td><span class="undocumented">Undocumented</span></td>
</tr><tr class="function">
<td>Function</td>
<td><a href="pymine.util.httpserve.html#httpserve_error">httpserve_error</a></td>
<td><span class="undocumented">Undocumented</span></td>
</tr><tr class="function">
<td>Function</td>
<td><a href="pymine.util.httpserve.html#httpserve_file">httpserve_file</a></td>
<td><span class="undocumented">Undocumented</span></td>
</tr><tr class="function">
<td>Function</td>
<td><a href="pymine.util.httpserve.html#httpserve_directory">httpserve_directory</a></td>
<td><span class="undocumented">Undocumented</span></td>
</tr><tr class="function">
<td>Function</td>
<td><a href="pymine.util.httpserve.html#httpserve_path">httpserve_path</a></td>
<td><span class="undocumented">Undocumented</span></td>
</tr>
</table>
</div>
<div class="function">
<a name="pymine.util.httpserve.cleanpath"></a>
<a name="cleanpath"></a>
<div class="functionHeader">
def cleanpath(old):
</div>
<div class="functionBody">
<div class="undocumented">Undocumented</div>
</div>
</div><div class="function">
<a name="pymine.util.httpserve.httpserve_error"></a>
<a name="httpserve_error"></a>
<div class="functionHeader">
def httpserve_error(url_path):
</div>
<div class="functionBody">
<div class="undocumented">Undocumented</div>
</div>
</div><div class="function">
<a name="pymine.util.httpserve.httpserve_file"></a>
<a name="httpserve_file"></a>
<div class="functionHeader">
def httpserve_file(file_path, content_type):
</div>
<div class="functionBody">
<div class="undocumented">Undocumented</div>
</div>
</div><div class="function">
<a name="pymine.util.httpserve.httpserve_directory"></a>
<a name="httpserve_directory"></a>
<div class="functionHeader">
def httpserve_directory(file_path):
</div>
<div class="functionBody">
<div class="undocumented">Undocumented</div>
</div>
</div><div class="function">
<a name="pymine.util.httpserve.httpserve_path"></a>
<a name="httpserve_path"></a>
<div class="functionHeader">
def httpserve_path(request, orig_path):
</div>
<div class="functionBody">
<div class="undocumented">Undocumented</div>
</div>
</div>
<address>
<a href="index.html">API Documentation</a> for pymine, generated by <a href="http://codespeak.net/~mwh/pydoctor/">pydoctor</a> at 2010-04-07 23:15:24.
</address>
</body>
</html> | {
"content_hash": "d19023430c4d0c8c2fd172b82af9febc",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 156,
"avg_line_length": 33.05806451612903,
"alnum_prop": 0.6133879781420765,
"repo_name": "alecmuffett/pymine",
"id": "57ddbbb4fa1abb205eff3289a6a5e24d83d5d3f2",
"size": "5124",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "public_html/apidocs/pymine.util.httpserve.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28284"
},
{
"name": "HTML",
"bytes": "1067540"
},
{
"name": "JavaScript",
"bytes": "37288"
},
{
"name": "Makefile",
"bytes": "2492"
},
{
"name": "Perl",
"bytes": "23464"
},
{
"name": "Python",
"bytes": "466492"
},
{
"name": "Shell",
"bytes": "9425"
}
],
"symlink_target": ""
} |
<resources>
<!-- Default screen margins, per the Android Design guidelines. -->
<dimen name="activity_horizontal_margin">16dp</dimen>
<dimen name="activity_vertical_margin">16dp</dimen>
<dimen name="padding_medium">5dp</dimen>
<dimen name="padding_small">2dp</dimen>
<dimen name="margin_medium">5dp</dimen>
<dimen name="margin_small">2dp</dimen>
<dimen name="padding_tiny">1dp</dimen>
<dimen name="padding_large">7dp</dimen>
<dimen name="margin_large">10dp</dimen>
</resources>
| {
"content_hash": "6b2eb3bdeab429e733c9046f345c34b1",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 71,
"avg_line_length": 37.142857142857146,
"alnum_prop": 0.675,
"repo_name": "pj2/trail-app",
"id": "60d52b22bc4fc1c5739f40864134b964e53b0ed8",
"size": "520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "res/values/dimens.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "65433"
}
],
"symlink_target": ""
} |
using System.Composition;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeFixes;
using Roslyn.Diagnostics.Analyzers;
namespace Roslyn.Diagnostics.CSharp.Analyzers
{
/// <summary>
/// RS0006: Do not mix attributes from different versions of MEF
/// </summary>
[ExportCodeFixProvider(LanguageNames.CSharp), Shared]
public class CSharpDoNotMixAttributesFromDifferentVersionsOfMEFFixer : DoNotMixAttributesFromDifferentVersionsOfMEFFixer
{
}
} | {
"content_hash": "bc0df614e793d770c6f4092af0fb25f2",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 124,
"avg_line_length": 32.2,
"alnum_prop": 0.7867494824016563,
"repo_name": "mavasani/roslyn-analyzers",
"id": "b148176e95c3fd788c3d1bdd88d0885a8a645e1b",
"size": "645",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/Roslyn.Diagnostics.Analyzers/CSharp/CSharpDoNotMixAttributesFromDifferentVersionsOfMEF.Fixer.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5365"
},
{
"name": "C#",
"bytes": "13544666"
},
{
"name": "CMake",
"bytes": "9446"
},
{
"name": "PowerShell",
"bytes": "160438"
},
{
"name": "Rich Text Format",
"bytes": "123141"
},
{
"name": "Shell",
"bytes": "107224"
},
{
"name": "Smalltalk",
"bytes": "705"
},
{
"name": "Vim Snippet",
"bytes": "2225"
},
{
"name": "Visual Basic .NET",
"bytes": "195294"
}
],
"symlink_target": ""
} |
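# Generate a third client certificate (client3.crt/key) by temporarily
# moving the default client identity aside so that the next lxc_remote call
# creates a fresh one, then restoring the original files.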
gen_third_cert() {
[ -f "${LXD_CONF}/client3.crt" ] && return
mv "${LXD_CONF}/client.crt" "${LXD_CONF}/client.crt.bak"
mv "${LXD_CONF}/client.key" "${LXD_CONF}/client.key.bak"
lxc_remote list > /dev/null 2>&1
mv "${LXD_CONF}/client.crt" "${LXD_CONF}/client3.crt"
mv "${LXD_CONF}/client.key" "${LXD_CONF}/client3.key"
mv "${LXD_CONF}/client.crt.bak" "${LXD_CONF}/client.crt"
mv "${LXD_CONF}/client.key.bak" "${LXD_CONF}/client.key"
}
test_basic_usage() {
# shellcheck disable=2039
local lxd_backend
lxd_backend=$(storage_backend "$LXD_DIR")
ensure_import_testimage
ensure_has_localhost_remote "${LXD_ADDR}"
# Test image export
sum=$(lxc image info testimage | grep ^Fingerprint | cut -d' ' -f2)
lxc image export testimage "${LXD_DIR}/"
[ "${sum}" = "$(sha256sum "${LXD_DIR}/${sum}.tar.xz" | cut -d' ' -f1)" ]
# Test an alias with slashes
lxc image show "${sum}"
lxc image alias create a/b/ "${sum}"
lxc image alias delete a/b/
# Test alias list filtering
lxc image alias create foo "${sum}"
lxc image alias create bar "${sum}"
lxc image alias list local: | grep -q foo
lxc image alias list local: | grep -q bar
lxc image alias list local: foo | grep -q -v bar
lxc image alias list local: "${sum}" | grep -q foo
lxc image alias list local: non-existent | grep -q -v non-existent
lxc image alias delete foo
lxc image alias delete bar
# Test image delete
lxc image delete testimage
# test GET /1.0, since the client always puts to /1.0/
my_curl -f -X GET "https://${LXD_ADDR}/1.0"
my_curl -f -X GET "https://${LXD_ADDR}/1.0/containers"
# Re-import the image
mv "${LXD_DIR}/${sum}.tar.xz" "${LXD_DIR}/testimage.tar.xz"
lxc image import "${LXD_DIR}/testimage.tar.xz" --alias testimage user.foo=bar
lxc image show testimage | grep -q "user.foo: bar"
lxc image delete testimage
lxc image import "${LXD_DIR}/testimage.tar.xz" --alias testimage
rm "${LXD_DIR}/testimage.tar.xz"
# Test filename for image export
lxc image export testimage "${LXD_DIR}/"
[ "${sum}" = "$(sha256sum "${LXD_DIR}/${sum}.tar.xz" | cut -d' ' -f1)" ]
rm "${LXD_DIR}/${sum}.tar.xz"
# Test custom filename for image export
lxc image export testimage "${LXD_DIR}/foo"
[ "${sum}" = "$(sha256sum "${LXD_DIR}/foo.tar.xz" | cut -d' ' -f1)" ]
rm "${LXD_DIR}/foo.tar.xz"
# Test image export with a split image.
deps/import-busybox --split --alias splitimage
sum=$(lxc image info splitimage | grep ^Fingerprint | cut -d' ' -f2)
lxc image export splitimage "${LXD_DIR}"
[ "${sum}" = "$(cat "${LXD_DIR}/meta-${sum}.tar.xz" "${LXD_DIR}/${sum}.tar.xz" | sha256sum | cut -d' ' -f1)" ]
# Delete the split image and exported files
rm "${LXD_DIR}/${sum}.tar.xz"
rm "${LXD_DIR}/meta-${sum}.tar.xz"
lxc image delete splitimage
# Redo the split image export test, this time with the --filename flag
# to tell import-busybox to set the 'busybox' filename in the upload.
# The sum should remain the same as its the same image.
deps/import-busybox --split --filename --alias splitimage
lxc image export splitimage "${LXD_DIR}"
[ "${sum}" = "$(cat "${LXD_DIR}/meta-${sum}.tar.xz" "${LXD_DIR}/${sum}.tar.xz" | sha256sum | cut -d' ' -f1)" ]
# Delete the split image and exported files
rm "${LXD_DIR}/${sum}.tar.xz"
rm "${LXD_DIR}/meta-${sum}.tar.xz"
lxc image delete splitimage
# Test container creation
lxc init testimage foo
lxc list | grep foo | grep STOPPED
lxc list fo | grep foo | grep STOPPED
# Test list json format
lxc list --format json | jq '.[]|select(.name="foo")' | grep '"name": "foo"'
# Test list with --columns and --fast
! lxc list --columns=nsp --fast
# Test container rename
lxc move foo bar
lxc list | grep -v foo
lxc list | grep bar
# Test container copy
lxc copy bar foo
lxc delete foo
# gen untrusted cert
gen_third_cert
# don't allow requests without a cert to get trusted data
curl -k -s -X GET "https://${LXD_ADDR}/1.0/containers/foo" | grep 403
# Test unprivileged container publish
lxc publish bar --alias=foo-image prop1=val1
lxc image show foo-image | grep val1
curl -k -s --cert "${LXD_CONF}/client3.crt" --key "${LXD_CONF}/client3.key" -X GET "https://${LXD_ADDR}/1.0/images" | grep "/1.0/images/" && false
lxc image delete foo-image
# Test container publish with existing alias
lxc publish bar --alias=foo-image --alias=foo-image2
lxc launch testimage baz
# change the container filesystem so the resulting image is different
lxc exec baz touch /somefile
lxc stop baz --force
# publishing another image with same alias doesn't fail
lxc publish baz --alias=foo-image
lxc delete baz
lxc image delete foo-image foo-image2
# Test privileged container publish
lxc profile create priv
lxc profile set priv security.privileged true
lxc init testimage barpriv -p default -p priv
lxc publish barpriv --alias=foo-image prop1=val1
lxc image show foo-image | grep val1
curl -k -s --cert "${LXD_CONF}/client3.crt" --key "${LXD_CONF}/client3.key" -X GET "https://${LXD_ADDR}/1.0/images" | grep "/1.0/images/" && false
lxc image delete foo-image
lxc delete barpriv
lxc profile delete priv
# Test that containers without metadata.yaml are published successfully.
# Note that this quick hack won't work for LVM, since it doesn't always mount
# the container's filesystem. That's ok though: the logic we're trying to
# test here is independent of storage backend, so running it for just one
# backend (or all non-lvm backends) is enough.
if [ "$lxd_backend" = "lvm" ]; then
lxc init testimage nometadata
rm -f "${LXD_DIR}/containers/nometadata/metadata.yaml"
lxc publish nometadata --alias=nometadata-image
lxc image delete nometadata-image
lxc delete nometadata
fi
# Test public images
lxc publish --public bar --alias=foo-image2
curl -k -s --cert "${LXD_CONF}/client3.crt" --key "${LXD_CONF}/client3.key" -X GET "https://${LXD_ADDR}/1.0/images" | grep "/1.0/images/"
lxc image delete foo-image2
# Test invalid container names
! lxc init testimage -abc
! lxc init testimage abc-
! lxc init testimage 1234
! lxc init testimage 12test
! lxc init testimage a_b_c
! lxc init testimage aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
# Test snapshot publish
lxc snapshot bar
lxc publish bar/snap0 --alias foo
lxc init foo bar2
lxc list | grep bar2
lxc delete bar2
lxc image delete foo
# test basic alias support
printf "aliases:\n ls: list" >> "${LXD_CONF}/config.yml"
lxc ls
# Delete the bar container we've used for several tests
lxc delete bar
# lxc delete should also delete all snapshots of bar
[ ! -d "${LXD_DIR}/snapshots/bar" ]
# Test randomly named container creation
lxc launch testimage
RDNAME=$(lxc list | tail -n2 | grep ^\| | awk '{print $2}')
lxc delete -f "${RDNAME}"
# Test "nonetype" container creation
wait_for "${LXD_ADDR}" my_curl -X POST "https://${LXD_ADDR}/1.0/containers" \
-d "{\"name\":\"nonetype\",\"source\":{\"type\":\"none\"}}"
lxc delete nonetype
# Test "nonetype" container creation with an LXC config
wait_for "${LXD_ADDR}" my_curl -X POST "https://${LXD_ADDR}/1.0/containers" \
-d "{\"name\":\"configtest\",\"config\":{\"raw.lxc\":\"lxc.hook.clone=/bin/true\"},\"source\":{\"type\":\"none\"}}"
# shellcheck disable=SC2102
[ "$(my_curl "https://${LXD_ADDR}/1.0/containers/configtest" | jq -r .metadata.config[\"raw.lxc\"])" = "lxc.hook.clone=/bin/true" ]
lxc delete configtest
# Test activateifneeded/shutdown
LXD_ACTIVATION_DIR=$(mktemp -d -p "${TEST_DIR}" XXX)
chmod +x "${LXD_ACTIVATION_DIR}"
spawn_lxd "${LXD_ACTIVATION_DIR}"
(
set -e
# shellcheck disable=SC2030
LXD_DIR=${LXD_ACTIVATION_DIR}
ensure_import_testimage
lxd activateifneeded --debug 2>&1 | grep -q "Daemon has core.https_address set, activating..."
lxc config unset core.https_address --force-local
lxd activateifneeded --debug 2>&1 | grep -q -v "activating..."
lxc init testimage autostart --force-local
lxd activateifneeded --debug 2>&1 | grep -q -v "activating..."
lxc config set autostart boot.autostart true --force-local
lxd activateifneeded --debug 2>&1 | grep -q "Daemon has auto-started containers, activating..."
lxc config unset autostart boot.autostart --force-local
lxd activateifneeded --debug 2>&1 | grep -q -v "activating..."
lxc start autostart --force-local
PID=$(lxc info autostart --force-local | grep ^Pid | awk '{print $2}')
shutdown_lxd "${LXD_DIR}"
[ -d "/proc/${PID}" ] && false
lxd activateifneeded --debug 2>&1 | grep -q "Daemon has auto-started containers, activating..."
# shellcheck disable=SC2031
respawn_lxd "${LXD_DIR}"
lxc list --force-local autostart | grep -q RUNNING
lxc delete autostart --force --force-local
)
# shellcheck disable=SC2031
LXD_DIR=${LXD_DIR}
kill_lxd "${LXD_ACTIVATION_DIR}"
# Create and start a container
lxc launch testimage foo
lxc list | grep foo | grep RUNNING
lxc stop foo --force # stop is hanging
# cycle it a few times
lxc start foo
mac1=$(lxc exec foo cat /sys/class/net/eth0/address)
lxc stop foo --force # stop is hanging
lxc start foo
mac2=$(lxc exec foo cat /sys/class/net/eth0/address)
if [ -n "${mac1}" ] && [ -n "${mac2}" ] && [ "${mac1}" != "${mac2}" ]; then
echo "==> MAC addresses didn't match across restarts (${mac1} vs ${mac2})"
false
fi
# Test instance types
lxc launch testimage test-limits -t c0.5-m0.2
[ "$(lxc config get test-limits limits.cpu)" = "1" ]
[ "$(lxc config get test-limits limits.cpu.allowance)" = "50%" ]
[ "$(lxc config get test-limits limits.memory)" = "204MB" ]
lxc delete -f test-limits
# check that we can set the environment
lxc exec foo pwd | grep /root
lxc exec --env BEST_BAND=meshuggah foo env | grep meshuggah
lxc exec foo ip link show | grep eth0
# check that we can get the return code for a non- wait-for-websocket exec
op=$(my_curl -X POST "https://${LXD_ADDR}/1.0/containers/foo/exec" -d '{"command": ["sleep", "1"], "environment": {}, "wait-for-websocket": false, "interactive": false}' | jq -r .operation)
[ "$(my_curl "https://${LXD_ADDR}${op}/wait" | jq -r .metadata.metadata.return)" != "null" ]
# test file transfer
echo abc > "${LXD_DIR}/in"
lxc file push "${LXD_DIR}/in" foo/root/
lxc exec foo /bin/cat /root/in | grep abc
lxc exec foo -- /bin/rm -f root/in
lxc file push "${LXD_DIR}/in" foo/root/in1
lxc exec foo /bin/cat /root/in1 | grep abc
lxc exec foo -- /bin/rm -f root/in1
# test lxc file edit doesn't change target file's owner and permissions
echo "content" | lxc file push - foo/tmp/edit_test
lxc exec foo -- chown 55.55 /tmp/edit_test
lxc exec foo -- chmod 555 /tmp/edit_test
echo "new content" | lxc file edit foo/tmp/edit_test
[ "$(lxc exec foo -- cat /tmp/edit_test)" = "new content" ]
[ "$(lxc exec foo -- stat -c \"%u %g %a\" /tmp/edit_test)" = "55 55 555" ]
# make sure stdin is chowned to our container root uid (Issue #590)
[ -t 0 ] && [ -t 1 ] && lxc exec foo -- chown 1000:1000 /proc/self/fd/0
echo foo | lxc exec foo tee /tmp/foo
# Detect regressions/hangs in exec
sum=$(ps aux | tee "${LXD_DIR}/out" | lxc exec foo md5sum | cut -d' ' -f1)
[ "${sum}" = "$(md5sum "${LXD_DIR}/out" | cut -d' ' -f1)" ]
rm "${LXD_DIR}/out"
# FIXME: make this backend agnostic
if [ "$lxd_backend" = "dir" ]; then
content=$(cat "${LXD_DIR}/containers/foo/rootfs/tmp/foo")
[ "${content}" = "foo" ]
fi
lxc launch testimage deleterunning
my_curl -X DELETE "https://${LXD_ADDR}/1.0/containers/deleterunning" | grep "container is running"
lxc delete deleterunning -f
# cleanup
lxc delete foo -f
if [ -e /sys/module/apparmor/ ]; then
# check that an apparmor profile is created for this container, that it is
# unloaded on stop, and that it is deleted when the container is deleted
lxc launch testimage lxd-apparmor-test
MAJOR=0
MINOR=0
if [ -f /sys/kernel/security/apparmor/features/domain/version ]; then
MAJOR=$(awk -F. '{print $1}' < /sys/kernel/security/apparmor/features/domain/version)
MINOR=$(awk -F. '{print $2}' < /sys/kernel/security/apparmor/features/domain/version)
fi
if [ "${MAJOR}" -gt "1" ] || ([ "${MAJOR}" = "1" ] && [ "${MINOR}" -ge "2" ]); then
aa_namespace="lxd-lxd-apparmor-test_<$(echo "${LXD_DIR}" | sed -e 's/\//-/g' -e 's/^.//')>"
aa-status | grep -q ":${aa_namespace}:unconfined" || aa-status | grep -q ":${aa_namespace}://unconfined"
lxc stop lxd-apparmor-test --force
! aa-status | grep -q ":${aa_namespace}:" || false
else
aa-status | grep "lxd-lxd-apparmor-test_<${LXD_DIR}>"
lxc stop lxd-apparmor-test --force
! aa-status | grep -q "lxd-lxd-apparmor-test_<${LXD_DIR}>" || false
fi
lxc delete lxd-apparmor-test
[ ! -f "${LXD_DIR}/security/apparmor/profiles/lxd-lxd-apparmor-test" ]
else
echo "==> SKIP: apparmor tests (missing kernel support)"
fi
# make sure that privileged containers are not world-readable
lxc profile create unconfined
lxc profile set unconfined security.privileged true
lxc init testimage foo2 -p unconfined
[ "$(stat -L -c "%a" "${LXD_DIR}/containers/foo2")" = "700" ]
lxc delete foo2
lxc profile delete unconfined
# Ephemeral
lxc launch testimage foo -e
OLD_INIT=$(lxc info foo | grep ^Pid)
lxc exec foo reboot || true
REBOOTED="false"
# shellcheck disable=SC2034
for i in $(seq 20); do
NEW_INIT=$(lxc info foo | grep ^Pid || true)
if [ -n "${NEW_INIT}" ] && [ "${OLD_INIT}" != "${NEW_INIT}" ]; then
REBOOTED="true"
break
fi
sleep 0.5
done
[ "${REBOOTED}" = "true" ]
# Workaround for LXC bug which causes LXD to double-start containers
# on reboot
sleep 2
lxc stop foo --force || true
! lxc list | grep -q foo || false
}
| {
"content_hash": "29e690cfbdf3689ba0faa7d27aaa87f8",
"timestamp": "",
"source": "github",
"line_count": 386,
"max_line_length": 191,
"avg_line_length": 36.26943005181347,
"alnum_prop": 0.6502857142857142,
"repo_name": "lxc/lxd-pkg-ubuntu",
"id": "d03375678558f19a441fd2febae96d0247e3604b",
"size": "14000",
"binary": false,
"copies": "1",
"ref": "refs/heads/dpm-xenial",
"path": "test/suites/basic.sh",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Emacs Lisp",
"bytes": "255"
},
{
"name": "Go",
"bytes": "1343501"
},
{
"name": "Makefile",
"bytes": "3345"
},
{
"name": "Python",
"bytes": "41102"
},
{
"name": "Shell",
"bytes": "108636"
}
],
"symlink_target": ""
} |
/*
* Modified by the GTK+ Team and others 1997-2000. See the AUTHORS
* file for a list of people on the GTK+ Team. See the ChangeLog
* files for a list of changes. These files are distributed with
* GTK+ at ftp://ftp.gtk.org/pub/gtk/.
*/
#if !defined (__GTK_H_INSIDE__) && !defined (GTK_COMPILATION)
#error "Only <gtk/gtk.h> can be included directly."
#endif
#ifndef __GTK_EDITABLE_H__
#define __GTK_EDITABLE_H__
#include <gtk/gtkwidget.h>
G_BEGIN_DECLS
#define GTK_TYPE_EDITABLE (gtk_editable_get_type ())
#define GTK_EDITABLE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GTK_TYPE_EDITABLE, GtkEditable))
#define GTK_IS_EDITABLE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GTK_TYPE_EDITABLE))
#define GTK_EDITABLE_GET_IFACE(inst) (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GTK_TYPE_EDITABLE, GtkEditableInterface))
typedef struct _GtkEditable GtkEditable; /* Dummy typedef */
typedef struct _GtkEditableInterface GtkEditableInterface;
struct _GtkEditableInterface
{
GTypeInterface base_iface;
/* signals */
void (* insert_text) (GtkEditable *editable,
const gchar *new_text,
gint new_text_length,
gint *position);
void (* delete_text) (GtkEditable *editable,
gint start_pos,
gint end_pos);
void (* changed) (GtkEditable *editable);
/* vtable */
void (* do_insert_text) (GtkEditable *editable,
const gchar *new_text,
gint new_text_length,
gint *position);
void (* do_delete_text) (GtkEditable *editable,
gint start_pos,
gint end_pos);
gchar* (* get_chars) (GtkEditable *editable,
gint start_pos,
gint end_pos);
void (* set_selection_bounds) (GtkEditable *editable,
gint start_pos,
gint end_pos);
gboolean (* get_selection_bounds) (GtkEditable *editable,
gint *start_pos,
gint *end_pos);
void (* set_position) (GtkEditable *editable,
gint position);
gint (* get_position) (GtkEditable *editable);
};
GType gtk_editable_get_type (void) G_GNUC_CONST;
void gtk_editable_select_region (GtkEditable *editable,
gint start_pos,
gint end_pos);
gboolean gtk_editable_get_selection_bounds (GtkEditable *editable,
gint *start_pos,
gint *end_pos);
void gtk_editable_insert_text (GtkEditable *editable,
const gchar *new_text,
gint new_text_length,
gint *position);
void gtk_editable_delete_text (GtkEditable *editable,
gint start_pos,
gint end_pos);
gchar* gtk_editable_get_chars (GtkEditable *editable,
gint start_pos,
gint end_pos);
void gtk_editable_cut_clipboard (GtkEditable *editable);
void gtk_editable_copy_clipboard (GtkEditable *editable);
void gtk_editable_paste_clipboard (GtkEditable *editable);
void gtk_editable_delete_selection (GtkEditable *editable);
void gtk_editable_set_position (GtkEditable *editable,
gint position);
gint gtk_editable_get_position (GtkEditable *editable);
void gtk_editable_set_editable (GtkEditable *editable,
gboolean is_editable);
gboolean gtk_editable_get_editable (GtkEditable *editable);
G_END_DECLS
#endif /* __GTK_EDITABLE_H__ */
| {
"content_hash": "138b052b42f8cef3f1ee567066960659",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 119,
"avg_line_length": 38.43877551020408,
"alnum_prop": 0.5696840987523228,
"repo_name": "jonnyniv/boost_converter",
"id": "15758dbbecf1a662ba5a51e03fa4f9463d194319",
"size": "4541",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "host/gui/GTK+/include/gtk-3.0/gtk/gtkeditable.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6868671"
},
{
"name": "C++",
"bytes": "299318"
},
{
"name": "CSS",
"bytes": "88298"
},
{
"name": "Objective-C",
"bytes": "101093"
},
{
"name": "Python",
"bytes": "263072"
},
{
"name": "Shell",
"bytes": "20030"
},
{
"name": "XSLT",
"bytes": "7232"
}
],
"symlink_target": ""
} |
package com.blocklaunch.blwarps.eventhandlers;
import com.blocklaunch.blwarps.BLWarps;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.event.Listener;
import org.spongepowered.api.event.entity.DamageEntityEvent;
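/**
 * Cancels an in-progress warp when the warping player takes damage,
 * provided the pvp-protect setting is enabled.
 */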
public class DamageEntityEventHandler {
private BLWarps plugin;
public DamageEntityEventHandler(BLWarps plugin) {
this.plugin = plugin;
}
@Listener
public void onDamageEntity(DamageEntityEvent event) {
if (!(event.getTargetEntity() instanceof Player)) {
return;
}
Player player = (Player) event.getTargetEntity();
if (!this.plugin.getConfig().isPvpProtect()) {
return;
}
// pvp-protect setting is enabled
if (!this.plugin.getWarpManager().isWarping(player)) {
return;
}
// Player is warping
this.plugin.getWarpManager().cancelWarp(player);
}
}
| {
"content_hash": "202da27d9df0ed9d1355b5eb61d6cfe8",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 62,
"avg_line_length": 25.756756756756758,
"alnum_prop": 0.6631689401888772,
"repo_name": "BlockLaunch/BLWarps",
"id": "5f7568ba667d2da16850b8247abef9729eebbaf8",
"size": "953",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/blocklaunch/blwarps/eventhandlers/DamageEntityEventHandler.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "111805"
}
],
"symlink_target": ""
} |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypeInfoImpl;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.simple.RParenthTailType;
import com.intellij.codeInsight.completion.util.CompletionStyleUtil;
import com.intellij.codeInsight.guess.GuessManager;
import com.intellij.codeInsight.lookup.*;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.tree.java.PsiEmptyExpressionImpl;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.List;
/**
* @author peter
*/
final class SmartCastProvider {
static boolean shouldSuggestCast(CompletionParameters parameters) {
PsiElement position = parameters.getPosition();
PsiElement parent = getParenthesisOwner(position);
return parent instanceof PsiTypeCastExpression || parent instanceof PsiParenthesizedExpression;
}
static boolean inCastContext(CompletionParameters parameters) {
PsiElement position = parameters.getPosition();
PsiElement parent = getParenthesisOwner(position);
if (parent instanceof PsiTypeCastExpression) return true;
if (parent instanceof PsiParenthesizedExpression) {
return parameters.getOffset() == position.getTextRange().getStartOffset();
}
return false;
}
private static PsiElement getParenthesisOwner(PsiElement position) {
PsiElement lParen = PsiTreeUtil.prevVisibleLeaf(position);
return lParen == null || !lParen.textMatches("(") ? null : lParen.getParent();
}
static void addCastVariants(@NotNull CompletionParameters parameters, PrefixMatcher matcher, @NotNull Consumer<? super LookupElement> result, boolean quick) {
if (!shouldSuggestCast(parameters)) return;
PsiElement position = parameters.getPosition();
PsiElement parenthesisOwner = getParenthesisOwner(position);
final boolean insideCast = parenthesisOwner instanceof PsiTypeCastExpression;
if (insideCast) {
PsiElement parent = parenthesisOwner.getParent();
if (parent instanceof PsiParenthesizedExpression) {
if (parent.getParent() instanceof PsiReferenceExpression) {
for (ExpectedTypeInfo info : ExpectedTypesProvider.getExpectedTypes((PsiParenthesizedExpression)parent, false)) {
result.consume(PsiTypeLookupItem.createLookupItem(info.getType(), parent));
}
}
for (ExpectedTypeInfo info : getParenthesizedCastExpectationByOperandType(position)) {
addHierarchyTypes(parameters, matcher, info, type -> result.consume(PsiTypeLookupItem.createLookupItem(type, parent)), quick);
}
return;
}
}
for (final ExpectedTypeInfo info : JavaSmartCompletionContributor.getExpectedTypes(parameters)) {
PsiType type = info.getDefaultType();
if (type instanceof PsiWildcardType) {
type = ((PsiWildcardType)type).getBound();
}
if (type == null || PsiType.VOID.equals(type)) {
continue;
}
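// if the expected type is primitive but the casted operand is a reference type, suggest the boxed type instead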
if (type instanceof PsiPrimitiveType) {
final PsiType castedType = getCastedExpressionType(parenthesisOwner);
if (castedType != null && !(castedType instanceof PsiPrimitiveType)) {
final PsiClassType boxedType = ((PsiPrimitiveType)type).getBoxedType(position);
if (boxedType != null) {
type = boxedType;
}
}
}
result.consume(createSmartCastElement(parameters, insideCast, type));
}
}
@NotNull
static List<ExpectedTypeInfo> getParenthesizedCastExpectationByOperandType(PsiElement position) {
PsiElement parenthesisOwner = getParenthesisOwner(position);
PsiExpression operand = getCastedExpression(parenthesisOwner);
if (operand == null || !(parenthesisOwner.getParent() instanceof PsiParenthesizedExpression)) return Collections.emptyList();
List<PsiType> dfaTypes = GuessManager.getInstance(operand.getProject()).getControlFlowExpressionTypeConjuncts(operand);
if (!dfaTypes.isEmpty()) {
return ContainerUtil.map(dfaTypes, dfaType ->
new ExpectedTypeInfoImpl(dfaType, ExpectedTypeInfo.TYPE_OR_SUPERTYPE, dfaType, TailType.NONE, null, ExpectedTypeInfoImpl.NULL));
}
PsiType type = operand.getType();
return type == null || type.equalsToText(CommonClassNames.JAVA_LANG_OBJECT) ? Collections.emptyList() :
Collections.singletonList(new ExpectedTypeInfoImpl(type, ExpectedTypeInfo.TYPE_OR_SUBTYPE, type, TailType.NONE, null, ExpectedTypeInfoImpl.NULL));
}
private static void addHierarchyTypes(CompletionParameters parameters, PrefixMatcher matcher, ExpectedTypeInfo info, Consumer<? super PsiType> result, boolean quick) {
PsiType infoType = info.getType();
PsiClass infoClass = PsiUtil.resolveClassInClassTypeOnly(infoType);
if (info.getKind() == ExpectedTypeInfo.TYPE_OR_SUPERTYPE) {
InheritanceUtil.processSupers(infoClass, true, superClass -> {
if (!CommonClassNames.JAVA_LANG_OBJECT.equals(superClass.getQualifiedName())) {
result.consume(JavaPsiFacade.getElementFactory(superClass.getProject()).createType(CompletionUtil.getOriginalOrSelf(superClass)));
}
return true;
});
} else if (infoType instanceof PsiClassType && !quick) {
JavaInheritorsGetter.processInheritors(parameters, Collections.singleton((PsiClassType)infoType), matcher, type -> {
if (!infoType.equals(type)) {
result.consume(type);
}
});
}
}
private static PsiType getCastedExpressionType(PsiElement parenthesisOwner) {
PsiExpression operand = getCastedExpression(parenthesisOwner);
return operand == null ? null : operand.getType();
}
private static PsiExpression getCastedExpression(PsiElement parenthesisOwner) {
if (parenthesisOwner instanceof PsiTypeCastExpression) {
return ((PsiTypeCastExpression)parenthesisOwner).getOperand();
}
if (parenthesisOwner instanceof PsiParenthesizedExpression) {
PsiElement next = parenthesisOwner.getNextSibling();
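// skip empty-expression placeholders, error elements and whitespace until the real operand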
while ((next instanceof PsiEmptyExpressionImpl || next instanceof PsiErrorElement || next instanceof PsiWhiteSpace)) {
next = next.getNextSibling();
}
if (next instanceof PsiExpression) {
return (PsiExpression)next;
}
}
return null;
}
private static LookupElement createSmartCastElement(final CompletionParameters parameters, final boolean overwrite, final PsiType type) {
return AutoCompletionPolicy.ALWAYS_AUTOCOMPLETE.applyPolicy(new LookupElementDecorator<>(
PsiTypeLookupItem.createLookupItem(type, parameters.getPosition())) {
@Override
public void renderElement(@NotNull LookupElementPresentation presentation) {
presentation.setItemText("(" + type.getPresentableText() + ")");
PsiClass aClass = PsiUtil.resolveClassInClassTypeOnly(type);
if (aClass != null) {
presentation.setIcon(aClass.getIcon(0));
}
}
@Override
public void handleInsert(@NotNull InsertionContext context) {
FeatureUsageTracker.getInstance().triggerFeatureUsed("editing.completion.smarttype.casting");
final Editor editor = context.getEditor();
final Document document = editor.getDocument();
if (overwrite) {
document.deleteString(context.getSelectionEndOffset(),
context.getOffsetMap().getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET));
}
final CommonCodeStyleSettings csSettings = CompletionStyleUtil.getCodeStyleSettings(context);
final int oldTail = context.getTailOffset();
context.setTailOffset(RParenthTailType.addRParenth(editor, oldTail, csSettings.SPACE_WITHIN_CAST_PARENTHESES));
getDelegate().handleInsert(CompletionUtil.newContext(context, getDelegate(), context.getStartOffset(), oldTail));
PostprocessReformattingAspect.getInstance(context.getProject()).doPostponedFormatting();
if (csSettings.SPACE_AFTER_TYPE_CAST) {
context.setTailOffset(TailType.insertChar(editor, context.getTailOffset(), ' '));
}
if (parameters.getCompletionType() == CompletionType.SMART || !overwrite) {
editor.getCaretModel().moveToOffset(context.getTailOffset());
}
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
}
});
}
}
| {
"content_hash": "6f43d61dded30d736e1704e3bdd710c9",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 169,
"avg_line_length": 45.655,
"alnum_prop": 0.7384733326032198,
"repo_name": "ingokegel/intellij-community",
"id": "c0cc0df1065efa409fd326f7cbbeb137a20e1a69",
"size": "9131",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "java/java-impl/src/com/intellij/codeInsight/completion/SmartCastProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |