text (string, lengths 2-100k) | meta (dict)
---|---|
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/comprehend/Comprehend_EXPORTS.h>
#include <aws/comprehend/ComprehendRequest.h>
#include <aws/comprehend/model/TopicsDetectionJobFilter.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
namespace Comprehend
{
namespace Model
{
/**
*/
class AWS_COMPREHEND_API ListTopicsDetectionJobsRequest : public ComprehendRequest
{
public:
ListTopicsDetectionJobsRequest();
// The service request name is the operation name that sends this request out.
// Each operation should have a unique request name so that the operation's name can be derived from its request.
// Note: this is not true for responses; multiple operations may share the same response name,
// so the operation's name cannot be derived from a response.
inline virtual const char* GetServiceRequestName() const override { return "ListTopicsDetectionJobs"; }
Aws::String SerializePayload() const override;
Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override;
/**
* <p>Filters the jobs that are returned. Jobs can be filtered on their name,
* status, or the date and time that they were submitted. You can set only one
* filter at a time.</p>
*/
inline const TopicsDetectionJobFilter& GetFilter() const{ return m_filter; }
/**
* <p>Filters the jobs that are returned. Jobs can be filtered on their name,
* status, or the date and time that they were submitted. You can set only one
* filter at a time.</p>
*/
inline bool FilterHasBeenSet() const { return m_filterHasBeenSet; }
/**
* <p>Filters the jobs that are returned. Jobs can be filtered on their name,
* status, or the date and time that they were submitted. You can set only one
* filter at a time.</p>
*/
inline void SetFilter(const TopicsDetectionJobFilter& value) { m_filterHasBeenSet = true; m_filter = value; }
/**
* <p>Filters the jobs that are returned. Jobs can be filtered on their name,
* status, or the date and time that they were submitted. You can set only one
* filter at a time.</p>
*/
inline void SetFilter(TopicsDetectionJobFilter&& value) { m_filterHasBeenSet = true; m_filter = std::move(value); }
/**
* <p>Filters the jobs that are returned. Jobs can be filtered on their name,
* status, or the date and time that they were submitted. You can set only one
* filter at a time.</p>
*/
inline ListTopicsDetectionJobsRequest& WithFilter(const TopicsDetectionJobFilter& value) { SetFilter(value); return *this;}
/**
* <p>Filters the jobs that are returned. Jobs can be filtered on their name,
* status, or the date and time that they were submitted. You can set only one
* filter at a time.</p>
*/
inline ListTopicsDetectionJobsRequest& WithFilter(TopicsDetectionJobFilter&& value) { SetFilter(std::move(value)); return *this;}
/**
* <p>Identifies the next page of results to return.</p>
*/
inline const Aws::String& GetNextToken() const{ return m_nextToken; }
/**
* <p>Identifies the next page of results to return.</p>
*/
inline bool NextTokenHasBeenSet() const { return m_nextTokenHasBeenSet; }
/**
* <p>Identifies the next page of results to return.</p>
*/
inline void SetNextToken(const Aws::String& value) { m_nextTokenHasBeenSet = true; m_nextToken = value; }
/**
* <p>Identifies the next page of results to return.</p>
*/
inline void SetNextToken(Aws::String&& value) { m_nextTokenHasBeenSet = true; m_nextToken = std::move(value); }
/**
* <p>Identifies the next page of results to return.</p>
*/
inline void SetNextToken(const char* value) { m_nextTokenHasBeenSet = true; m_nextToken.assign(value); }
/**
* <p>Identifies the next page of results to return.</p>
*/
inline ListTopicsDetectionJobsRequest& WithNextToken(const Aws::String& value) { SetNextToken(value); return *this;}
/**
* <p>Identifies the next page of results to return.</p>
*/
inline ListTopicsDetectionJobsRequest& WithNextToken(Aws::String&& value) { SetNextToken(std::move(value)); return *this;}
/**
* <p>Identifies the next page of results to return.</p>
*/
inline ListTopicsDetectionJobsRequest& WithNextToken(const char* value) { SetNextToken(value); return *this;}
/**
* <p>The maximum number of results to return in each page. The default is 100.</p>
*/
inline int GetMaxResults() const{ return m_maxResults; }
/**
* <p>The maximum number of results to return in each page. The default is 100.</p>
*/
inline bool MaxResultsHasBeenSet() const { return m_maxResultsHasBeenSet; }
/**
* <p>The maximum number of results to return in each page. The default is 100.</p>
*/
inline void SetMaxResults(int value) { m_maxResultsHasBeenSet = true; m_maxResults = value; }
/**
* <p>The maximum number of results to return in each page. The default is 100.</p>
*/
inline ListTopicsDetectionJobsRequest& WithMaxResults(int value) { SetMaxResults(value); return *this;}
private:
TopicsDetectionJobFilter m_filter;
bool m_filterHasBeenSet;
Aws::String m_nextToken;
bool m_nextTokenHasBeenSet;
int m_maxResults;
bool m_maxResultsHasBeenSet;
};
} // namespace Model
} // namespace Comprehend
} // namespace Aws
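/*
 * Illustrative usage sketch (not part of the generated header). It assumes the usual
 * AWS SDK for C++ client pattern: ComprehendClient::ListTopicsDetectionJobs() plus
 * result accessors such as GetNextToken(); those names come from the wider SDK, not
 * from this file, and the job name below is made up.
 */
#include <aws/core/Aws.h>
#include <aws/comprehend/ComprehendClient.h>
#include <aws/comprehend/model/ListTopicsDetectionJobsRequest.h>
int main()
{
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::Comprehend::ComprehendClient client;
        // Only one filter attribute may be set at a time (see GetFilter() above).
        Aws::Comprehend::Model::TopicsDetectionJobFilter filter;
        filter.SetJobName("my-topics-job"); // hypothetical job name
        Aws::Comprehend::Model::ListTopicsDetectionJobsRequest request;
        request.WithFilter(filter).WithMaxResults(25); // default page size is 100
        // Page through the job list using NextToken.
        Aws::String nextToken;
        do {
            if (!nextToken.empty()) {
                request.SetNextToken(nextToken);
            }
            auto outcome = client.ListTopicsDetectionJobs(request);
            if (!outcome.IsSuccess()) {
                break;
            }
            nextToken = outcome.GetResult().GetNextToken();
        } while (!nextToken.empty());
    }
    Aws::ShutdownAPI(options);
    return 0;
}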
|
{
"pile_set_name": "Github"
}
|
package com.pi4j.io.i2c.impl;
/*
* #%L
* **********************************************************************
* ORGANIZATION : Pi4J
* PROJECT : Pi4J :: Java Library (Core)
* FILENAME : I2CProviderImpl.java
*
* This file is part of the Pi4J project. More information about
* this project can be found here: https://www.pi4j.com/
* **********************************************************************
* %%
* Copyright (C) 2012 - 2020 Pi4J
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Lesser Public License for more details.
*
* You should have received a copy of the GNU General Lesser Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/lgpl-3.0.html>.
* #L%
*/
import java.io.File;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import com.pi4j.io.i2c.I2CBus;
import com.pi4j.io.i2c.I2CFactory.UnsupportedBusNumberException;
import com.pi4j.io.i2c.I2CFactoryProvider;
public class I2CProviderImpl implements I2CFactoryProvider {
public I2CProviderImpl() {
}
// Validate that the requested bus exists in sysfs and that its /dev node is readable
// and writable before opening it; otherwise report the bus number as unsupported.
public I2CBus getBus(final int busNumber, final long lockAquireTimeout, final TimeUnit lockAquireTimeoutUnit) throws UnsupportedBusNumberException, IOException {
final File sysfs = new File("/sys/bus/i2c/devices/i2c-" + busNumber);
if (!sysfs.exists() || !sysfs.isDirectory()) {
throw new UnsupportedBusNumberException();
}
final File devfs = new File("/dev/i2c-" + busNumber);
if (!devfs.exists() || !devfs.canRead() || !devfs.canWrite()) {
throw new UnsupportedBusNumberException();
}
I2CBusImpl result = new I2CBusImpl(busNumber, devfs.getCanonicalPath(), lockAquireTimeout, lockAquireTimeoutUnit);
result.open();
return result;
}
}
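// Illustrative usage sketch (hypothetical caller; assumes the usual Pi4J v1 API in which
// I2CFactory.getInstance() delegates to a provider like the one above, and a made-up
// sensor at address 0x48 on bus 1). Fully qualified names are used so no extra imports
// are needed.
class I2CProviderUsageExample {
    public static void main(String[] args) throws Exception {
        // getInstance() performs the same sysfs and /dev checks shown above before opening the bus.
        I2CBus bus = com.pi4j.io.i2c.I2CFactory.getInstance(I2CBus.BUS_1);
        com.pi4j.io.i2c.I2CDevice device = bus.getDevice(0x48);
        int value = device.read(0x00); // read one byte from register 0x00
        System.out.println("register 0x00 = " + value);
        bus.close();
    }
}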
|
{
"pile_set_name": "Github"
}
|
/*
* tegra20_spdif.h - Definitions for Tegra20 SPDIF driver
*
* Author: Stephen Warren <[email protected]>
* Copyright (C) 2011 - NVIDIA, Inc.
*
* Based on code copyright/by:
* Copyright (c) 2008-2009, NVIDIA Corporation
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* version 2 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
*/
#ifndef __TEGRA20_SPDIF_H__
#define __TEGRA20_SPDIF_H__
#include "tegra_pcm.h"
/* Offsets from TEGRA20_SPDIF_BASE */
#define TEGRA20_SPDIF_CTRL 0x0
#define TEGRA20_SPDIF_STATUS 0x4
#define TEGRA20_SPDIF_STROBE_CTRL 0x8
#define TEGRA20_SPDIF_DATA_FIFO_CSR 0x0C
#define TEGRA20_SPDIF_DATA_OUT 0x40
#define TEGRA20_SPDIF_DATA_IN 0x80
#define TEGRA20_SPDIF_CH_STA_RX_A 0x100
#define TEGRA20_SPDIF_CH_STA_RX_B 0x104
#define TEGRA20_SPDIF_CH_STA_RX_C 0x108
#define TEGRA20_SPDIF_CH_STA_RX_D 0x10C
#define TEGRA20_SPDIF_CH_STA_RX_E 0x110
#define TEGRA20_SPDIF_CH_STA_RX_F 0x114
#define TEGRA20_SPDIF_CH_STA_TX_A 0x140
#define TEGRA20_SPDIF_CH_STA_TX_B 0x144
#define TEGRA20_SPDIF_CH_STA_TX_C 0x148
#define TEGRA20_SPDIF_CH_STA_TX_D 0x14C
#define TEGRA20_SPDIF_CH_STA_TX_E 0x150
#define TEGRA20_SPDIF_CH_STA_TX_F 0x154
#define TEGRA20_SPDIF_USR_STA_RX_A 0x180
#define TEGRA20_SPDIF_USR_DAT_TX_A 0x1C0
/* Fields in TEGRA20_SPDIF_CTRL */
/* Start capturing from 0=right, 1=left channel */
#define TEGRA20_SPDIF_CTRL_CAP_LC (1 << 30)
/* SPDIF receiver(RX) enable */
#define TEGRA20_SPDIF_CTRL_RX_EN (1 << 29)
/* SPDIF Transmitter(TX) enable */
#define TEGRA20_SPDIF_CTRL_TX_EN (1 << 28)
/* Transmit Channel status */
#define TEGRA20_SPDIF_CTRL_TC_EN (1 << 27)
/* Transmit user Data */
#define TEGRA20_SPDIF_CTRL_TU_EN (1 << 26)
/* Interrupt on transmit error */
#define TEGRA20_SPDIF_CTRL_IE_TXE (1 << 25)
/* Interrupt on receive error */
#define TEGRA20_SPDIF_CTRL_IE_RXE (1 << 24)
/* Interrupt on invalid preamble */
#define TEGRA20_SPDIF_CTRL_IE_P (1 << 23)
/* Interrupt on "B" preamble */
#define TEGRA20_SPDIF_CTRL_IE_B (1 << 22)
/* Interrupt when block of channel status received */
#define TEGRA20_SPDIF_CTRL_IE_C (1 << 21)
/* Interrupt when a valid information unit (IU) is received */
#define TEGRA20_SPDIF_CTRL_IE_U (1 << 20)
/* Interrupt when RX user FIFO attention level is reached */
#define TEGRA20_SPDIF_CTRL_QE_RU (1 << 19)
/* Interrupt when TX user FIFO attention level is reached */
#define TEGRA20_SPDIF_CTRL_QE_TU (1 << 18)
/* Interrupt when RX data FIFO attention level is reached */
#define TEGRA20_SPDIF_CTRL_QE_RX (1 << 17)
/* Interrupt when TX data FIFO attention level is reached */
#define TEGRA20_SPDIF_CTRL_QE_TX (1 << 16)
/* Loopback test mode enable */
#define TEGRA20_SPDIF_CTRL_LBK_EN (1 << 15)
/*
* Pack data mode:
* 0 = Single data (16 bit needs to be padded to match the
* interface data bit size).
* 1 = Packed left/right channel data into a single word.
*/
#define TEGRA20_SPDIF_CTRL_PACK (1 << 14)
/*
* 00 = 16bit data
* 01 = 20bit data
* 10 = 24bit data
* 11 = raw data
*/
#define TEGRA20_SPDIF_BIT_MODE_16BIT 0
#define TEGRA20_SPDIF_BIT_MODE_20BIT 1
#define TEGRA20_SPDIF_BIT_MODE_24BIT 2
#define TEGRA20_SPDIF_BIT_MODE_RAW 3
#define TEGRA20_SPDIF_CTRL_BIT_MODE_SHIFT 12
#define TEGRA20_SPDIF_CTRL_BIT_MODE_MASK (3 << TEGRA20_SPDIF_CTRL_BIT_MODE_SHIFT)
#define TEGRA20_SPDIF_CTRL_BIT_MODE_16BIT (TEGRA20_SPDIF_BIT_MODE_16BIT << TEGRA20_SPDIF_CTRL_BIT_MODE_SHIFT)
#define TEGRA20_SPDIF_CTRL_BIT_MODE_20BIT (TEGRA20_SPDIF_BIT_MODE_20BIT << TEGRA20_SPDIF_CTRL_BIT_MODE_SHIFT)
#define TEGRA20_SPDIF_CTRL_BIT_MODE_24BIT (TEGRA20_SPDIF_BIT_MODE_24BIT << TEGRA20_SPDIF_CTRL_BIT_MODE_SHIFT)
#define TEGRA20_SPDIF_CTRL_BIT_MODE_RAW (TEGRA20_SPDIF_BIT_MODE_RAW << TEGRA20_SPDIF_CTRL_BIT_MODE_SHIFT)
/* Fields in TEGRA20_SPDIF_STATUS */
/*
* Note: IS_P, IS_B, IS_C, and IS_U are sticky bits. Software must
* write a 1 to the corresponding bit location to clear the status.
*/
/*
* Receiver(RX) shifter is busy receiving data.
* This bit is asserted when the receiver first locked onto the
* preamble of the data stream after RX_EN is asserted. This bit is
* deasserted when either,
* (a) the end of a frame is reached after RX_EN is deasserted, or
* (b) the SPDIF data stream becomes inactive.
*/
#define TEGRA20_SPDIF_STATUS_RX_BSY (1 << 29)
/*
* Transmitter(TX) shifter is busy transmitting data.
* This bit is asserted when TX_EN is asserted.
* This bit is deasserted when the end of a frame is reached after
* TX_EN is deasserted.
*/
#define TEGRA20_SPDIF_STATUS_TX_BSY (1 << 28)
/*
* TX is busy shifting out channel status.
* This bit is asserted when both TX_EN and TC_EN are asserted and
* data from CH_STA_TX_A register is loaded into the internal shifter.
* This bit is deasserted when either,
* (a) the end of a frame is reached after TX_EN is deasserted, or
* (b) CH_STA_TX_F register is loaded into the internal shifter.
*/
#define TEGRA20_SPDIF_STATUS_TC_BSY (1 << 27)
/*
* TX User data FIFO busy.
* This bit is asserted when TX_EN and TXU_EN are asserted and
* there's data in the TX user FIFO. This bit is deasserted when either,
* (a) the end of a frame is reached after TX_EN is deasserted, or
* (b) there's no data left in the TX user FIFO.
*/
#define TEGRA20_SPDIF_STATUS_TU_BSY (1 << 26)
/* TX FIFO Underrun error status */
#define TEGRA20_SPDIF_STATUS_TX_ERR (1 << 25)
/* RX FIFO Overrun error status */
#define TEGRA20_SPDIF_STATUS_RX_ERR (1 << 24)
/* Preamble status: 0=Preamble OK, 1=bad/missing preamble */
#define TEGRA20_SPDIF_STATUS_IS_P (1 << 23)
/* B-preamble detection status: 0=not detected, 1=B-preamble detected */
#define TEGRA20_SPDIF_STATUS_IS_B (1 << 22)
/*
* RX channel block data receive status:
* 0=entire block not received yet.
* 1=entire block of channel status received.
*/
#define TEGRA20_SPDIF_STATUS_IS_C (1 << 21)
/* RX User Data Valid flag: 1=valid IU detected, 0 = no IU detected. */
#define TEGRA20_SPDIF_STATUS_IS_U (1 << 20)
/*
* RX User FIFO Status:
* 1=attention level reached, 0=attention level not reached.
*/
#define TEGRA20_SPDIF_STATUS_QS_RU (1 << 19)
/*
* TX User FIFO Status:
* 1=attention level reached, 0=attention level not reached.
*/
#define TEGRA20_SPDIF_STATUS_QS_TU (1 << 18)
/*
* RX Data FIFO Status:
* 1=attention level reached, 0=attention level not reached.
*/
#define TEGRA20_SPDIF_STATUS_QS_RX (1 << 17)
/*
* TX Data FIFO Status:
* 1=attention level reached, 0=attention level not reached.
*/
#define TEGRA20_SPDIF_STATUS_QS_TX (1 << 16)
/* Fields in TEGRA20_SPDIF_STROBE_CTRL */
/*
* Indicates the approximate number of detected SPDIFIN clocks within a
* bi-phase period.
*/
#define TEGRA20_SPDIF_STROBE_CTRL_PERIOD_SHIFT 16
#define TEGRA20_SPDIF_STROBE_CTRL_PERIOD_MASK (0xff << TEGRA20_SPDIF_STROBE_CTRL_PERIOD_SHIFT)
/* Data strobe mode: 0=Auto-locked 1=Manual locked */
#define TEGRA20_SPDIF_STROBE_CTRL_STROBE (1 << 15)
/*
* Manual data strobe time within the bi-phase clock period (in terms of
* the number of over-sampling clocks).
*/
#define TEGRA20_SPDIF_STROBE_CTRL_DATA_STROBES_SHIFT 8
#define TEGRA20_SPDIF_STROBE_CTRL_DATA_STROBES_MASK (0x1f << TEGRA20_SPDIF_STROBE_CTRL_DATA_STROBES_SHIFT)
/*
* Manual SPDIFIN bi-phase clock period (in terms of the number of
* over-sampling clocks).
*/
#define TEGRA20_SPDIF_STROBE_CTRL_CLOCK_PERIOD_SHIFT 0
#define TEGRA20_SPDIF_STROBE_CTRL_CLOCK_PERIOD_MASK (0x3f << TEGRA20_SPDIF_STROBE_CTRL_CLOCK_PERIOD_SHIFT)
/* Fields in SPDIF_DATA_FIFO_CSR */
/* Clear Receiver User FIFO (RX USR.FIFO) */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_CLR (1 << 31)
#define TEGRA20_SPDIF_FIFO_ATN_LVL_U_ONE_SLOT 0
#define TEGRA20_SPDIF_FIFO_ATN_LVL_U_TWO_SLOTS 1
#define TEGRA20_SPDIF_FIFO_ATN_LVL_U_THREE_SLOTS 2
#define TEGRA20_SPDIF_FIFO_ATN_LVL_U_FOUR_SLOTS 3
/* RU FIFO attention level */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_SHIFT 29
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_MASK \
(0x3 << TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_RU1_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_ONE_SLOT << TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_RU2_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_TWO_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_RU3_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_THREE_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_RU4_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_FOUR_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_RU_ATN_LVL_SHIFT)
/* Number of RX USR.FIFO levels with valid data. */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_FULL_COUNT_SHIFT 24
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RU_FULL_COUNT_MASK (0x1f << TEGRA20_SPDIF_DATA_FIFO_CSR_RU_FULL_COUNT_SHIFT)
/* Clear Transmitter User FIFO (TX USR.FIFO) */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_CLR (1 << 23)
/* TU FIFO attention level */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_SHIFT 21
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_MASK \
(0x3 << TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_TU1_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_ONE_SLOT << TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_TU2_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_TWO_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_TU3_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_THREE_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_TU4_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_U_FOUR_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_TU_ATN_LVL_SHIFT)
/* Number of TX USR.FIFO levels that could be filled. */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_EMPTY_COUNT_SHIFT 16
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TU_EMPTY_COUNT_MASK (0x1f << TEGRA20_SPDIF_DATA_FIFO_CSR_TU_EMPTY_COUNT_SHIFT)
/* Clear Receiver Data FIFO (RX DATA.FIFO) */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_CLR (1 << 15)
#define TEGRA20_SPDIF_FIFO_ATN_LVL_D_ONE_SLOT 0
#define TEGRA20_SPDIF_FIFO_ATN_LVL_D_FOUR_SLOTS 1
#define TEGRA20_SPDIF_FIFO_ATN_LVL_D_EIGHT_SLOTS 2
#define TEGRA20_SPDIF_FIFO_ATN_LVL_D_TWELVE_SLOTS 3
/* RX FIFO attention level */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_SHIFT 13
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_MASK \
(0x3 << TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_RU1_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_ONE_SLOT << TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_RU4_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_FOUR_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_RU8_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_EIGHT_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_RU12_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_TWELVE_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_RX_ATN_LVL_SHIFT)
/* Number of RX DATA.FIFO levels with valid data. */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_FULL_COUNT_SHIFT 8
#define TEGRA20_SPDIF_DATA_FIFO_CSR_RX_FULL_COUNT_MASK (0x1f << TEGRA20_SPDIF_DATA_FIFO_CSR_RX_FULL_COUNT_SHIFT)
/* Clear Transmitter Data FIFO (TX DATA.FIFO) */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_CLR (1 << 7)
/* TX FIFO attention level */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_SHIFT 5
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_MASK \
(0x3 << TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_TU1_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_ONE_SLOT << TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_TU4_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_FOUR_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_TU8_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_EIGHT_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_SHIFT)
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_TU12_WORD_FULL \
(TEGRA20_SPDIF_FIFO_ATN_LVL_D_TWELVE_SLOTS << TEGRA20_SPDIF_DATA_FIFO_CSR_TX_ATN_LVL_SHIFT)
/* Number of TX DATA.FIFO levels that could be filled. */
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_EMPTY_COUNT_SHIFT 0
#define TEGRA20_SPDIF_DATA_FIFO_CSR_TX_EMPTY_COUNT_MASK (0x1f << TEGRA20_SPDIF_DATA_FIFO_CSR_TX_EMPTY_COUNT_SHIFT)
/* Fields in TEGRA20_SPDIF_DATA_OUT */
/*
* This register has 5 different formats:
* 16-bit (BIT_MODE=00, PACK=0)
* 20-bit (BIT_MODE=01, PACK=0)
* 24-bit (BIT_MODE=10, PACK=0)
* raw (BIT_MODE=11, PACK=0)
* 16-bit packed (BIT_MODE=00, PACK=1)
*/
#define TEGRA20_SPDIF_DATA_OUT_DATA_16_SHIFT 0
#define TEGRA20_SPDIF_DATA_OUT_DATA_16_MASK (0xffff << TEGRA20_SPDIF_DATA_OUT_DATA_16_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_20_SHIFT 0
#define TEGRA20_SPDIF_DATA_OUT_DATA_20_MASK (0xfffff << TEGRA20_SPDIF_DATA_OUT_DATA_20_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_24_SHIFT 0
#define TEGRA20_SPDIF_DATA_OUT_DATA_24_MASK (0xffffff << TEGRA20_SPDIF_DATA_OUT_DATA_24_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_P (1 << 31)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_C (1 << 30)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_U (1 << 29)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_V (1 << 28)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_DATA_SHIFT 8
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_DATA_MASK (0xfffff << TEGRA20_SPDIF_DATA_OUT_DATA_RAW_DATA_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_AUX_SHIFT 4
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_AUX_MASK (0xf << TEGRA20_SPDIF_DATA_OUT_DATA_RAW_AUX_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_PREAMBLE_SHIFT 0
#define TEGRA20_SPDIF_DATA_OUT_DATA_RAW_PREAMBLE_MASK (0xf << TEGRA20_SPDIF_DATA_OUT_DATA_RAW_PREAMBLE_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_16_PACKED_RIGHT_SHIFT 16
#define TEGRA20_SPDIF_DATA_OUT_DATA_16_PACKED_RIGHT_MASK (0xffff << TEGRA20_SPDIF_DATA_OUT_DATA_16_PACKED_RIGHT_SHIFT)
#define TEGRA20_SPDIF_DATA_OUT_DATA_16_PACKED_LEFT_SHIFT 0
#define TEGRA20_SPDIF_DATA_OUT_DATA_16_PACKED_LEFT_MASK (0xffff << TEGRA20_SPDIF_DATA_OUT_DATA_16_PACKED_LEFT_SHIFT)
/* Fields in TEGRA20_SPDIF_DATA_IN */
/*
* This register has 5 different formats:
* 16-bit (BIT_MODE=00, PACK=0)
* 20-bit (BIT_MODE=01, PACK=0)
* 24-bit (BIT_MODE=10, PACK=0)
* raw (BIT_MODE=11, PACK=0)
* 16-bit packed (BIT_MODE=00, PACK=1)
*
* Bits 31:24 are common to all modes except 16-bit packed
*/
#define TEGRA20_SPDIF_DATA_IN_DATA_P (1 << 31)
#define TEGRA20_SPDIF_DATA_IN_DATA_C (1 << 30)
#define TEGRA20_SPDIF_DATA_IN_DATA_U (1 << 29)
#define TEGRA20_SPDIF_DATA_IN_DATA_V (1 << 28)
#define TEGRA20_SPDIF_DATA_IN_DATA_PREAMBLE_SHIFT 24
#define TEGRA20_SPDIF_DATA_IN_DATA_PREAMBLE_MASK (0xf << TEGRA20_SPDIF_DATA_IN_DATA_PREAMBLE_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_16_SHIFT 0
#define TEGRA20_SPDIF_DATA_IN_DATA_16_MASK (0xffff << TEGRA20_SPDIF_DATA_IN_DATA_16_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_20_SHIFT 0
#define TEGRA20_SPDIF_DATA_IN_DATA_20_MASK (0xfffff << TEGRA20_SPDIF_DATA_IN_DATA_20_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_24_SHIFT 0
#define TEGRA20_SPDIF_DATA_IN_DATA_24_MASK (0xffffff << TEGRA20_SPDIF_DATA_IN_DATA_24_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_RAW_DATA_SHIFT 8
#define TEGRA20_SPDIF_DATA_IN_DATA_RAW_DATA_MASK (0xfffff << TEGRA20_SPDIF_DATA_IN_DATA_RAW_DATA_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_RAW_AUX_SHIFT 4
#define TEGRA20_SPDIF_DATA_IN_DATA_RAW_AUX_MASK (0xf << TEGRA20_SPDIF_DATA_IN_DATA_RAW_AUX_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_RAW_PREAMBLE_SHIFT 0
#define TEGRA20_SPDIF_DATA_IN_DATA_RAW_PREAMBLE_MASK (0xf << TEGRA20_SPDIF_DATA_IN_DATA_RAW_PREAMBLE_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_16_PACKED_RIGHT_SHIFT 16
#define TEGRA20_SPDIF_DATA_IN_DATA_16_PACKED_RIGHT_MASK (0xffff << TEGRA20_SPDIF_DATA_IN_DATA_16_PACKED_RIGHT_SHIFT)
#define TEGRA20_SPDIF_DATA_IN_DATA_16_PACKED_LEFT_SHIFT 0
#define TEGRA20_SPDIF_DATA_IN_DATA_16_PACKED_LEFT_MASK (0xffff << TEGRA20_SPDIF_DATA_IN_DATA_16_PACKED_LEFT_SHIFT)
/* Fields in TEGRA20_SPDIF_CH_STA_RX_A */
/* Fields in TEGRA20_SPDIF_CH_STA_RX_B */
/* Fields in TEGRA20_SPDIF_CH_STA_RX_C */
/* Fields in TEGRA20_SPDIF_CH_STA_RX_D */
/* Fields in TEGRA20_SPDIF_CH_STA_RX_E */
/* Fields in TEGRA20_SPDIF_CH_STA_RX_F */
/*
* The 6-word receive channel data page buffer holds a block (192 frames) of
* channel status information. The order of receive is from LSB to MSB
* bit, and from CH_STA_RX_A to CH_STA_RX_F then back to CH_STA_RX_A.
*/
/* Fields in TEGRA20_SPDIF_CH_STA_TX_A */
/* Fields in TEGRA20_SPDIF_CH_STA_TX_B */
/* Fields in TEGRA20_SPDIF_CH_STA_TX_C */
/* Fields in TEGRA20_SPDIF_CH_STA_TX_D */
/* Fields in TEGRA20_SPDIF_CH_STA_TX_E */
/* Fields in TEGRA20_SPDIF_CH_STA_TX_F */
/*
* The 6-word transmit channel data page buffer holds a block (192 frames) of
* channel status information. The order of transmission is from LSB to MSB
* bit, and from CH_STA_TX_A to CH_STA_TX_F then back to CH_STA_TX_A.
*/
/* Fields in TEGRA20_SPDIF_USR_STA_RX_A */
/*
* This 4-word deep FIFO receives user FIFO field information. The order of
* receive is from LSB to MSB bit.
*/
/* Fields in TEGRA20_SPDIF_USR_DAT_TX_A */
/*
* This 4-word deep FIFO transmits user FIFO field information. The order of
* transmission is from LSB to MSB bit.
*/
struct tegra20_spdif {
struct clk *clk_spdif_out;
struct snd_dmaengine_dai_dma_data capture_dma_data;
struct snd_dmaengine_dai_dma_data playback_dma_data;
struct regmap *regmap;
};
#endif
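/*
 * Illustrative sketch (not part of the original header): composing a CTRL value that
 * enables the transmitter for 16-bit packed stereo data. The __iomem base pointer and
 * the writel() access are hypothetical; the real driver goes through the regmap held
 * in struct tegra20_spdif above.
 */
#include <linux/io.h>
#include <linux/types.h>
static inline void tegra20_spdif_example_start_tx(void __iomem *base)
{
	u32 ctrl = TEGRA20_SPDIF_CTRL_TX_EN |
		   TEGRA20_SPDIF_CTRL_PACK |
		   TEGRA20_SPDIF_CTRL_BIT_MODE_16BIT;
	writel(ctrl, base + TEGRA20_SPDIF_CTRL);
}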
|
{
"pile_set_name": "Github"
}
|
# https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#securitySchemeObject
type: basic
|
{
"pile_set_name": "Github"
}
|
/*=========================================================================
Library: CTK
Copyright (c) Kitware Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0.txt
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================================================*/
// CTK includes
#include "ctkDICOMTableManagerPlugin.h"
#include "ctkDICOMTableManager.h"
//-----------------------------------------------------------------------------
ctkDICOMTableManagerPlugin::ctkDICOMTableManagerPlugin(QObject* pluginParent)
: QObject(pluginParent)
{
}
//-----------------------------------------------------------------------------
QWidget *ctkDICOMTableManagerPlugin::createWidget(QWidget *parentForWidget)
{
ctkDICOMTableManager* newWidget = new ctkDICOMTableManager(parentForWidget);
return newWidget;
}
//-----------------------------------------------------------------------------
QString ctkDICOMTableManagerPlugin::domXml() const
{
return "<widget class=\"ctkDICOMTableManager\" \
name=\"DICOMTableManager\">\n"
"</widget>\n";
}
// --------------------------------------------------------------------------
QIcon ctkDICOMTableManagerPlugin::icon() const
{
return QIcon(":/Icons/listview.png");
}
//-----------------------------------------------------------------------------
QString ctkDICOMTableManagerPlugin::includeFile() const
{
return "ctkDICOMTableManager.h";
}
//-----------------------------------------------------------------------------
bool ctkDICOMTableManagerPlugin::isContainer() const
{
return false;
}
//-----------------------------------------------------------------------------
QString ctkDICOMTableManagerPlugin::name() const
{
return "ctkDICOMTableManager";
}
|
{
"pile_set_name": "Github"
}
|
# $OpenBSD: Makefile,v 1.2 2015/09/13 15:19:19 nigel Exp $
COMMENT = set of fonts designed to complement Source Sans Pro
VERSION = 1.017
REVISION = 0
GH_PROJECT = source-serif-pro
GH_TAGNAME = ${VERSION}R
.include <bsd.port.mk>
|
{
"pile_set_name": "Github"
}
|
@{
Layout = "";
}
@model Grand.Plugin.DiscountRequirements.HasAllProducts.Models.RequirementModel
@using Grand.Framework;
<script>
$(document).ready(function () {
$('#saveHasAllProductsrequirement@(Model.RequirementId)').click(function () {
var productIds = $("#@Html.FieldIdFor(model => model.Products)").val();
var discountId = '@Model.DiscountId';
var requirementId = '@Model.RequirementId';
var postData = {
discountId: discountId,
discountRequirementId: requirementId,
productIds: productIds
};
addAntiForgeryToken(postData);
$.ajax({
cache:false,
type: "POST",
url: "@(Url.Action("Configure", "HasAllProducts"))",
data: postData,
success: function (data) {
$('#pnl-save-requirement-result@(Model.RequirementId)').fadeIn("slow").delay(1000).fadeOut("slow");
//notify parent if it's a new requirement
@if (String.IsNullOrEmpty(Model.RequirementId))
{
<text>$("#discountRequirementContainer").trigger('grandnewdiscountruleadded', [data.NewRequirementId]);</text>
}
},
error:function (xhr, ajaxOptions, thrownError){
alert('@Html.Raw(System.Text.Encodings.Web.JavaScriptEncoder.Default.Encode(T("Admin.Promotions.Discounts.Requirements.FailedToSave").Text))');
}
});
});
});
</script>
<div class="form-body">
<admin-label asp-for="Products" />
<div class="col-md-8 col-sm-8">
<admin-input asp-for="Products" />
<span class="control-label" style="text-align:left" id="discountrequirement-product-names@(Model.RequirementId)"></span>
<div class="group-btn btn-group-devided margin-top-10">
<input type="button" id="saveHasAllProductsrequirement@(Model.RequirementId)" class="btn default" value="@T("Admin.Common.Save")" />
<input type="submit" id="btnAddNewDiscountRequirementProduct@(Model.RequirementId)" value="@T("Plugins.DiscountRequirements.HasAllProducts.Fields.Products.AddNew")" onclick="javascript:OpenWindow('@(Url.RouteUrl("Plugin.DiscountRequirements.HasAllProducts.ProductAddPopup", new { btnId = "btnRefreshDiscountRequirementProducts" + Model.RequirementId, productIdsInput = Html.FieldIdFor(model => model.Products) }))', 800, 800, true); return false;" class="btn default" />
</div>
<div id="pnl-save-requirement-result@(Model.RequirementId)" style="display:none;">@T("Admin.Promotions.Discounts.Requirements.Saved")</div>
<span id="discountrequirement-products-check-progress@(Model.RequirementId)" style="display: none;" class="please-wait">@T("Common.Wait...")</span>
<input type="submit" id="btnRefreshDiscountRequirementProducts@(Model.RequirementId)" style="display: none" />
<script>
$(document).ready(function () {
$('#btnRefreshDiscountRequirementProducts@(Model.RequirementId)').click(function () {
//refresh product list
loadDiscountRequirementProductFriendlyNames@(Model.RequirementId)();
//return false so the page is not reloaded
return false;
});
});
$(document).ready(function () {
loadDiscountRequirementProductFriendlyNames@(Model.RequirementId)();
});
$(document).ready(function () {
$('#@Html.FieldIdFor(model => model.Products)')
.data('timeout', null)
.keyup(function() {
clearTimeout($(this).data('timeout'));
//one second timeout
$(this).data('timeout', setTimeout(loadDiscountRequirementProductFriendlyNames@(Model.RequirementId), 1000));
});
});
function loadDiscountRequirementProductFriendlyNames@(Model.RequirementId)() {
var inputValue = $('#@Html.FieldIdFor(model => model.Products)').val();
if (inputValue) {
//load friendly names
$('#discountrequirement-products-check-progress@(Model.RequirementId)').show();
var postData = {
productIds: inputValue
};
addAntiForgeryToken(postData);
$.ajax({
cache: false,
type: 'POST',
url: '@Url.RouteUrl("Plugin.DiscountRequirements.HasAllProducts.LoadProductFriendlyNames")',
data: postData,
dataType: 'json',
success: function(data) {
$('#discountrequirement-products-check-progress@(Model.RequirementId)').hide();
$('#discountrequirement-product-names@(Model.RequirementId)').text(data.Text);
},
error: function () {
$('#discountrequirement-products-check-progress@(Model.RequirementId)').hide();
$('#discountrequirement-product-names@(Model.RequirementId)').text('');
}
});
} else {
$('#discountrequirement-product-names@(Model.RequirementId)').text('');
}
}
</script>
</div>
</div>
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2019 The FATE Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webank.ai.fate.core.error.exception;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.exception.ExceptionUtils;
import java.util.List;
public class MultipleRuntimeThrowables extends Throwable {
private List<Throwable> throwables;
public MultipleRuntimeThrowables() {
throwables = Lists.newArrayList();
}
public MultipleRuntimeThrowables(String message, List<Throwable> throwables) {
super(message, new Throwable(stackTraceMessage(throwables)));
this.throwables = throwables;
}
private static String stackTraceMessage(List<Throwable> throwables) {
StringBuilder sb = new StringBuilder();
int idx = 0;
for (Throwable throwable : throwables) {
sb.append("idx: ")
.append(idx++)
.append("\n")
.append(ExceptionUtils.getStackTrace(throwable))
.append("\n\n");
}
return sb.toString();
}
}
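// Illustrative usage sketch (hypothetical caller, reusing the imports above): run a
// batch of tasks, collect every failure, and surface them as a single throwable.
class MultipleRuntimeThrowablesExample {
    static void runAll(List<Runnable> tasks) throws MultipleRuntimeThrowables {
        List<Throwable> failures = Lists.newArrayList();
        for (Runnable task : tasks) {
            try {
                task.run();
            } catch (Throwable t) {
                failures.add(t);
            }
        }
        if (!failures.isEmpty()) {
            throw new MultipleRuntimeThrowables(failures.size() + " task(s) failed", failures);
        }
    }
}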
|
{
"pile_set_name": "Github"
}
|
<?php
defined('C5_EXECUTE') or die("Access Denied.");
/**
* Bundles groups together into different sets.
* @package Users
* @author Andrew Embler <[email protected]>
* @category Concrete
* @copyright Copyright (c) 2003-2008 Concrete5. (http://www.concrete5.org)
* @license http://www.concrete5.org/license/ MIT License
*
*/
class Concrete5_Model_GroupSet extends ConcreteObject {
public static function getList() {
$db = Loader::db();
$r = $db->Execute('select gsID from GroupSets order by gsName asc');
$list = array();
while ($row = $r->FetchRow()) {
$list[] = GroupSet::getByID($row['gsID']);
}
return $list;
}
public static function getByID($gsID) {
$db = Loader::db();
$row = $db->GetRow('select gsID, pkgID, gsName from GroupSets where gsID = ?', array($gsID));
if (isset($row['gsID'])) {
$gs = new GroupSet();
$gs->setPropertiesFromArray($row);
return $gs;
}
}
public static function getByName($gsName) {
$db = Loader::db();
$row = $db->GetRow('select gsID, pkgID, gsName from GroupSets where gsName = ?', array($gsName));
if (isset($row['gsID'])) {
$gs = new GroupSet();
$gs->setPropertiesFromArray($row);
return $gs;
}
}
public static function getListByPackage($pkg) {
$db = Loader::db();
$list = array();
$r = $db->Execute('select gsID from GroupSets where pkgID = ? order by gsID asc', array($pkg->getPackageID()));
while ($row = $r->FetchRow()) {
$list[] = GroupSet::getByID($row['gsID']);
}
$r->Close();
return $list;
}
public function getGroupSetID() {return $this->gsID;}
public function getGroupSetName() {return $this->gsName;}
public function getPackageID() {return $this->pkgID;}
/** Returns the display name for this group set (localized and escaped according to $format)
* @param string $format = 'html'
* Escape the result in html format (if $format is 'html').
* If $format is 'text' or any other value, the display name won't be escaped.
* @return string
*/
public function getGroupSetDisplayName($format = 'html') {
$value = tc('GroupSetName', $this->getGroupSetName());
switch($format) {
case 'html':
return h($value);
case 'text':
default:
return $value;
}
}
public function updateGroupSetName($gsName) {
$this->gsName = $gsName;
$db = Loader::db();
$db->Execute("update GroupSets set gsName = ? where gsID = ?", array($gsName, $this->gsID));
}
public function addGroup(Group $g) {
$db = Loader::db();
$no = $db->GetOne("select count(gID) from GroupSetGroups where gID = ? and gsID = ?", array($g->getGroupID(), $this->getGroupSetID()));
if ($no < 1) {
$db->Execute('insert into GroupSetGroups (gsID, gID) values (?, ?)', array($this->getGroupSetID(), $g->getGroupID()));
}
}
public static function add($gsName, $pkg = false) {
$db = Loader::db();
$pkgID = 0;
if (is_object($pkg)) {
$pkgID = $pkg->getPackageID();
}
$db->Execute('insert into GroupSets (gsName, pkgID) values (?,?)', array($gsName, $pkgID));
$id = $db->Insert_ID();
$gs = GroupSet::getByID($id);
return $gs;
}
public function clearGroups() {
$db = Loader::db();
$db->Execute('delete from GroupSetGroups where gsID = ?', array($this->gsID));
}
public function getGroups() {
$db = Loader::db();
$r = $db->Execute('select gID from GroupSetGroups where gsID = ? order by gID asc', $this->getGroupSetId());
$groups = array();
while ($row = $r->FetchRow()) {
$g = Group::getByID($row['gID']);
if (is_object($g)) {
$groups[] = $g;
}
}
return $groups;
}
public function contains(Group $g) {
$db = Loader::db();
$r = $db->GetOne('select count(gID) from GroupSetGroups where gsID = ? and gID = ?', array($this->getGroupSetID(), $g->getGroupID()));
return $r > 0;
}
public function delete() {
$db = Loader::db();
$db->Execute('delete from GroupSets where gsID = ?', array($this->getGroupSetID()));
$db->Execute('delete from GroupSetGroups where gsID = ?', array($this->getGroupSetID()));
}
public function removeGroup(Group $g) {
$db = Loader::db();
$db->Execute('delete from GroupSetGroups where gsID = ? and gID = ?', array($this->getGroupSetID(), $g->getGroupID()));
}
}
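// Illustrative usage sketch (hypothetical group names; assumes a concrete5 runtime in
// which the Group and GroupSet models are loaded). Defined as a function so nothing
// runs when this file is included.
function groupSetUsageExample() {
    $staff = GroupSet::add('Staff');
    $editors = Group::getByName('Editors');
    if (is_object($editors)) {
        $staff->addGroup($editors);
    }
    foreach ($staff->getGroups() as $g) {
        echo $g->getGroupName() . "\n";
    }
}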
|
{
"pile_set_name": "Github"
}
|
/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the QtGui module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 3 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 3 requirements
** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 2.0 or (at your option) the GNU General
** Public license version 3 or any later version approved by the KDE Free
** Qt Foundation. The licenses are as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-2.0.html and
** https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/
#ifndef QMATRIX_H
#define QMATRIX_H
#include <QtGui/qtguiglobal.h>
#include <QtGui/qpolygon.h>
#include <QtGui/qregion.h>
#include <QtGui/qwindowdefs.h>
#include <QtCore/qline.h>
#include <QtCore/qpoint.h>
#include <QtCore/qrect.h>
QT_BEGIN_NAMESPACE
class QPainterPath;
class QVariant;
class Q_GUI_EXPORT QMatrix // 2D transform matrix
{
public:
inline explicit QMatrix(Qt::Initialization) {}
QMatrix();
QMatrix(qreal m11, qreal m12, qreal m21, qreal m22,
qreal dx, qreal dy);
#if QT_VERSION < QT_VERSION_CHECK(6, 0, 0)
// ### Qt 6: remove; the compiler-generated ones are fine!
QMatrix &operator=(QMatrix &&other) Q_DECL_NOTHROW // = default
{ memcpy(static_cast<void *>(this), static_cast<void *>(&other), sizeof(QMatrix)); return *this; }
QMatrix &operator=(const QMatrix &) Q_DECL_NOTHROW; // = default
QMatrix(QMatrix &&other) Q_DECL_NOTHROW // = default
{ memcpy(static_cast<void *>(this), static_cast<void *>(&other), sizeof(QMatrix)); }
QMatrix(const QMatrix &other) Q_DECL_NOTHROW; // = default
#endif
void setMatrix(qreal m11, qreal m12, qreal m21, qreal m22,
qreal dx, qreal dy);
qreal m11() const { return _m11; }
qreal m12() const { return _m12; }
qreal m21() const { return _m21; }
qreal m22() const { return _m22; }
qreal dx() const { return _dx; }
qreal dy() const { return _dy; }
void map(int x, int y, int *tx, int *ty) const;
void map(qreal x, qreal y, qreal *tx, qreal *ty) const;
QRect mapRect(const QRect &) const;
QRectF mapRect(const QRectF &) const;
QPoint map(const QPoint &p) const;
QPointF map(const QPointF&p) const;
QLine map(const QLine &l) const;
QLineF map(const QLineF &l) const;
QPolygonF map(const QPolygonF &a) const;
QPolygon map(const QPolygon &a) const;
QRegion map(const QRegion &r) const;
QPainterPath map(const QPainterPath &p) const;
QPolygon mapToPolygon(const QRect &r) const;
void reset();
inline bool isIdentity() const;
QMatrix &translate(qreal dx, qreal dy);
QMatrix &scale(qreal sx, qreal sy);
QMatrix &shear(qreal sh, qreal sv);
QMatrix &rotate(qreal a);
bool isInvertible() const { return !qFuzzyIsNull(_m11*_m22 - _m12*_m21); }
qreal determinant() const { return _m11*_m22 - _m12*_m21; }
Q_REQUIRED_RESULT QMatrix inverted(bool *invertible = nullptr) const;
bool operator==(const QMatrix &) const;
bool operator!=(const QMatrix &) const;
QMatrix &operator*=(const QMatrix &);
QMatrix operator*(const QMatrix &o) const;
operator QVariant() const;
private:
inline QMatrix(bool)
: _m11(1.)
, _m12(0.)
, _m21(0.)
, _m22(1.)
, _dx(0.)
, _dy(0.) {}
inline QMatrix(qreal am11, qreal am12, qreal am21, qreal am22, qreal adx, qreal ady, bool)
: _m11(am11)
, _m12(am12)
, _m21(am21)
, _m22(am22)
, _dx(adx)
, _dy(ady) {}
friend class QTransform;
qreal _m11, _m12;
qreal _m21, _m22;
qreal _dx, _dy;
};
Q_DECLARE_TYPEINFO(QMatrix, Q_MOVABLE_TYPE);
Q_GUI_EXPORT Q_DECL_CONST_FUNCTION uint qHash(const QMatrix &key, uint seed = 0) Q_DECL_NOTHROW;
// mathematical semantics
inline QPoint operator*(const QPoint &p, const QMatrix &m)
{ return m.map(p); }
inline QPointF operator*(const QPointF &p, const QMatrix &m)
{ return m.map(p); }
inline QLineF operator*(const QLineF &l, const QMatrix &m)
{ return m.map(l); }
inline QLine operator*(const QLine &l, const QMatrix &m)
{ return m.map(l); }
inline QPolygon operator *(const QPolygon &a, const QMatrix &m)
{ return m.map(a); }
inline QPolygonF operator *(const QPolygonF &a, const QMatrix &m)
{ return m.map(a); }
inline QRegion operator *(const QRegion &r, const QMatrix &m)
{ return m.map(r); }
Q_GUI_EXPORT QPainterPath operator *(const QPainterPath &p, const QMatrix &m);
inline bool QMatrix::isIdentity() const
{
return qFuzzyIsNull(_m11 - 1) && qFuzzyIsNull(_m22 - 1) && qFuzzyIsNull(_m12)
&& qFuzzyIsNull(_m21) && qFuzzyIsNull(_dx) && qFuzzyIsNull(_dy);
}
inline bool qFuzzyCompare(const QMatrix& m1, const QMatrix& m2)
{
return qFuzzyCompare(m1.m11(), m2.m11())
&& qFuzzyCompare(m1.m12(), m2.m12())
&& qFuzzyCompare(m1.m21(), m2.m21())
&& qFuzzyCompare(m1.m22(), m2.m22())
&& qFuzzyCompare(m1.dx(), m2.dx())
&& qFuzzyCompare(m1.dy(), m2.dy());
}
/*****************************************************************************
QMatrix stream functions
*****************************************************************************/
#ifndef QT_NO_DATASTREAM
Q_GUI_EXPORT QDataStream &operator<<(QDataStream &, const QMatrix &);
Q_GUI_EXPORT QDataStream &operator>>(QDataStream &, QMatrix &);
#endif
#ifndef QT_NO_DEBUG_STREAM
Q_GUI_EXPORT QDebug operator<<(QDebug, const QMatrix &);
#endif
QT_END_NAMESPACE
#endif // QMATRIX_H
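/*
 * Illustrative usage sketch (separate program; assumes a Qt 5 build where QMatrix is
 * still available, as it was deprecated in favour of QTransform in Qt 5.15): compose a
 * translation with a rotation and map a point through the result.
 */
#include <QMatrix>
#include <QPointF>
#include <QDebug>
int main()
{
    QMatrix m;
    m.translate(10.0, 5.0); // sets dx/dy
    m.rotate(90.0);         // angle in degrees
    const QPointF mapped = m.map(QPointF(1.0, 0.0));
    qDebug() << "mapped:" << mapped
             << "identity:" << m.isIdentity()
             << "invertible:" << m.isInvertible();
    return 0;
}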
|
{
"pile_set_name": "Github"
}
|
.syntax-theme-base .syntax .html .doctype {
font-weight: bold;
color: #993388; }
.syntax-theme-base .syntax .html .javascript, .syntax-theme-base .syntax .html .css {
font-style: italic; }
|
{
"pile_set_name": "Github"
}
|
import DiffMatchPatch from 'diff-match-patch';
import cledit from './cledit';
import utils from '../utils';
import diffUtils from '../diffUtils';
import store from '../../store';
import EditorClassApplier from '../../components/common/EditorClassApplier';
import PreviewClassApplier from '../../components/common/PreviewClassApplier';
let clEditor;
// let discussionIds = {};
let discussionMarkers = {};
let markerKeys;
let markerIdxMap;
let previousPatchableText;
let currentPatchableText;
let isChangePatch;
let contentId;
let editorClassAppliers = {};
let previewClassAppliers = {};
function getDiscussionMarkers(discussion, discussionId, onMarker) {
const getMarker = (offsetName) => {
const markerKey = `${discussionId}:${offsetName}`;
let marker = discussionMarkers[markerKey];
if (!marker) {
marker = new cledit.Marker(discussion[offsetName], offsetName === 'end');
marker.discussionId = discussionId;
marker.offsetName = offsetName;
clEditor.addMarker(marker);
discussionMarkers[markerKey] = marker;
}
onMarker(marker);
};
getMarker('start');
getMarker('end');
}
function syncDiscussionMarkers(content, writeOffsets) {
const discussions = {
...content.discussions,
};
const newDiscussion = store.getters['discussion/newDiscussion'];
if (newDiscussion) {
discussions[store.state.discussion.newDiscussionId] = {
...newDiscussion,
};
}
Object.entries(discussionMarkers).forEach(([markerKey, marker]) => {
// Remove marker if discussion was removed
const discussion = discussions[marker.discussionId];
if (!discussion) {
clEditor.removeMarker(marker);
delete discussionMarkers[markerKey];
}
});
Object.entries(discussions).forEach(([discussionId, discussion]) => {
getDiscussionMarkers(discussion, discussionId, writeOffsets
? (marker) => {
discussion[marker.offsetName] = marker.offset;
}
: (marker) => {
marker.offset = discussion[marker.offsetName];
});
});
if (writeOffsets && newDiscussion) {
store.commit(
'discussion/patchNewDiscussion',
discussions[store.state.discussion.newDiscussionId],
);
}
}
function removeDiscussionMarkers() {
Object.entries(discussionMarkers).forEach(([, marker]) => {
clEditor.removeMarker(marker);
});
discussionMarkers = {};
markerKeys = [];
markerIdxMap = Object.create(null);
}
const diffMatchPatch = new DiffMatchPatch();
function makePatches() {
const diffs = diffMatchPatch.diff_main(previousPatchableText, currentPatchableText);
return diffMatchPatch.patch_make(previousPatchableText, diffs);
}
function applyPatches(patches) {
const newPatchableText = diffMatchPatch.patch_apply(patches, currentPatchableText)[0];
let result = newPatchableText;
if (markerKeys.length) {
// Strip text markers
result = result.replace(new RegExp(`[\ue000-${String.fromCharCode((0xe000 + markerKeys.length) - 1)}]`, 'g'), '');
}
// Expect a `contentChanged` event
if (result !== clEditor.getContent()) {
previousPatchableText = currentPatchableText;
currentPatchableText = newPatchableText;
isChangePatch = true;
}
return result;
}
// Invert a patch list so it can undo a change: deep-copy the patches, reverse their
// order, and flip the sign of each diff operation (insert becomes delete and vice versa).
function reversePatches(patches) {
const result = diffMatchPatch.patch_deepCopy(patches).reverse();
result.forEach((patch) => {
patch.diffs.forEach((diff) => {
diff[0] = -diff[0];
});
});
return result;
}
export default {
createClEditor(editorElt) {
this.clEditor = cledit(editorElt, editorElt.parentNode, true);
({ clEditor } = this);
clEditor.on('contentChanged', (text) => {
const oldContent = store.getters['content/current'];
const newContent = {
...utils.deepCopy(oldContent),
text: utils.sanitizeText(text),
};
syncDiscussionMarkers(newContent, true);
if (!isChangePatch) {
previousPatchableText = currentPatchableText;
currentPatchableText = diffUtils.makePatchableText(newContent, markerKeys, markerIdxMap);
} else {
// Take a chance to restore discussion offsets on undo/redo
newContent.text = currentPatchableText;
diffUtils.restoreDiscussionOffsets(newContent, markerKeys);
syncDiscussionMarkers(newContent, false);
}
store.dispatch('content/patchCurrent', newContent);
isChangePatch = false;
});
clEditor.on('focus', () => store.commit('discussion/setNewCommentFocus', false));
},
initClEditorInternal(opts) {
const content = store.getters['content/current'];
if (content) {
removeDiscussionMarkers(); // Markers will be recreated on contentChanged
const contentState = store.getters['contentState/current'];
const options = Object.assign({
selectionStart: contentState.selectionStart,
selectionEnd: contentState.selectionEnd,
patchHandler: {
makePatches,
applyPatches,
reversePatches,
},
}, opts);
if (contentId !== content.id) {
contentId = content.id;
currentPatchableText = diffUtils.makePatchableText(content, markerKeys, markerIdxMap);
previousPatchableText = currentPatchableText;
syncDiscussionMarkers(content, false);
options.content = content.text;
}
clEditor.init(options);
}
},
applyContent() {
if (clEditor) {
const content = store.getters['content/current'];
if (clEditor.setContent(content.text, true).range) {
// Marker will be recreated on contentChange
removeDiscussionMarkers();
} else {
syncDiscussionMarkers(content, false);
}
}
},
getTrimmedSelection() {
const { selectionMgr } = clEditor;
let start = Math.min(selectionMgr.selectionStart, selectionMgr.selectionEnd);
let end = Math.max(selectionMgr.selectionStart, selectionMgr.selectionEnd);
const text = clEditor.getContent();
while ((text[start] || '').match(/\s/)) {
start += 1;
}
while ((text[end - 1] || '').match(/\s/)) {
end -= 1;
}
return start < end && { start, end };
},
initHighlighters() {
store.watch(
() => store.getters['discussion/newDiscussion'],
() => syncDiscussionMarkers(store.getters['content/current'], false),
);
store.watch(
() => store.getters['discussion/currentFileDiscussions'],
(discussions) => {
const classGetter = (type, discussionId) => () => {
const classes = [`discussion-${type}-highlighting--${discussionId}`, `discussion-${type}-highlighting`];
if (store.state.discussion.currentDiscussionId === discussionId) {
classes.push(`discussion-${type}-highlighting--selected`);
}
return classes;
};
const offsetGetter = discussionId => () => {
const startMarker = discussionMarkers[`${discussionId}:start`];
const endMarker = discussionMarkers[`${discussionId}:end`];
return startMarker && endMarker && {
start: startMarker.offset,
end: endMarker.offset,
};
};
// Editor class appliers
const oldEditorClassAppliers = editorClassAppliers;
editorClassAppliers = {};
Object.keys(discussions).forEach((discussionId) => {
const classApplier = oldEditorClassAppliers[discussionId] || new EditorClassApplier(
classGetter('editor', discussionId),
offsetGetter(discussionId),
{ discussionId },
);
editorClassAppliers[discussionId] = classApplier;
});
// Clean unused class appliers
Object.entries(oldEditorClassAppliers).forEach(([discussionId, classApplier]) => {
if (!editorClassAppliers[discussionId]) {
classApplier.stop();
}
});
// Preview class appliers
const oldPreviewClassAppliers = previewClassAppliers;
previewClassAppliers = {};
Object.keys(discussions).forEach((discussionId) => {
const classApplier = oldPreviewClassAppliers[discussionId] || new PreviewClassApplier(
classGetter('preview', discussionId),
offsetGetter(discussionId),
{ discussionId },
);
previewClassAppliers[discussionId] = classApplier;
});
// Clean unused class appliers
Object.entries(oldPreviewClassAppliers).forEach(([discussionId, classApplier]) => {
if (!previewClassAppliers[discussionId]) {
classApplier.stop();
}
});
},
);
},
};
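// Illustrative sketch (standalone, reusing the DiffMatchPatch import above): the
// undo/redo patch handler wired into clEditor.init() reduces to this make/apply
// round trip over two versions of the patchable text.
const exampleDmp = new DiffMatchPatch();
const exampleBefore = 'Hello world';
const exampleAfter = 'Hello brave world';
const examplePatches = exampleDmp.patch_make(exampleBefore, exampleDmp.diff_main(exampleBefore, exampleAfter));
const [examplePatched] = exampleDmp.patch_apply(examplePatches, exampleBefore);
console.log(examplePatched === exampleAfter); // true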
|
{
"pile_set_name": "Github"
}
|
package com.eveningoutpost.dexdrip.utils.bt;
import android.bluetooth.BluetoothDevice;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import com.eveningoutpost.dexdrip.Models.UserError;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
// jamorham
public class ConnectReceiver extends BroadcastReceiver {
private static final String TAG = ConnectReceiver.class.getSimpleName();
private static final ConcurrentHashMap<String, BtCallBack3> callbacks = new ConcurrentHashMap<>();
public static void addCallBack(BtCallBack3 callback, String name) {
callbacks.put(name, callback);
}
public static void removeCallBack(String name) {
callbacks.remove(name);
}
private synchronized void processCallBacks(String address, String status, BluetoothDevice device) {
for (Map.Entry<String, BtCallBack3> entry : callbacks.entrySet()) {
UserError.Log.d(TAG, "Callback: " + entry.getKey());
entry.getValue().btCallback3(address, status, null, null, device);
}
}
@Override
public void onReceive(Context context, Intent intent) {
try {
//noinspection ConstantConditions
if (intent.getAction().equals("android.bluetooth.device.action.ACL_CONNECTED")) {
final BluetoothDevice device = (BluetoothDevice) intent.getParcelableExtra("android.bluetooth.device.extra.DEVICE");
if (device != null) {
final String address = device.getAddress();
if (address != null) {
UserError.Log.d(TAG, "Connection notice: " + address);
processCallBacks(address, "CONNECTED", device);
}
}
}
} catch (NullPointerException e) {
UserError.Log.e(TAG, "NPE in onReceive: " + e);
}
}
}
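// Illustrative usage sketch (hypothetical caller; assumes BtCallBack3 is a single-method
// interface, as the processCallBacks() call above suggests, so a lambda can implement it).
// The watched MAC address is made up.
class ConnectReceiverExampleUser {
    private static final String WATCHED_ADDRESS = "AA:BB:CC:DD:EE:FF";
    static void register() {
        ConnectReceiver.addCallBack((address, status, a, b, device) -> {
            if (WATCHED_ADDRESS.equalsIgnoreCase(address) && "CONNECTED".equals(status)) {
                UserError.Log.d("ExampleUser", "Watched device connected: " + address);
            }
        }, "ExampleUser");
    }
    static void unregister() {
        ConnectReceiver.removeCallBack("ExampleUser");
    }
}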
|
{
"pile_set_name": "Github"
}
|
// Copyright (c) 2014-2015 The Notify Authors. All rights reserved.
// Use of this source code is governed by the MIT license that can be
// found in the LICENSE file.
// +build !darwin,!linux,!freebsd,!dragonfly,!netbsd,!openbsd,!windows
// +build !kqueue,!solaris
package notify
// Platform independent event values.
const (
osSpecificCreate Event = 1 << iota
osSpecificRemove
osSpecificWrite
osSpecificRename
// internal
// recursive is used to distinguish recursive eventsets from non-recursive ones
recursive
// omit is used for dispatching internal events; only those events are sent
// for which both the event and the watchpoint have omit in their event sets.
omit
)
var osestr = map[Event]string{}
type event struct{}
func (e *event) Event() (_ Event) { return }
func (e *event) Path() (_ string) { return }
func (e *event) Sys() (_ interface{}) { return }
func (e *event) isDir() (_ bool, _ error) { return }
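// Illustrative usage sketch (shown as a comment because this file declares package
// notify; Watch, Stop, and EventInfo are the package's public API, not defined in this
// file, and the import path is assumed to be the upstream github.com/rjeczalik/notify):
//
//	c := make(chan notify.EventInfo, 1)
//	if err := notify.Watch("./...", c, notify.Create, notify.Write); err != nil {
//		log.Fatal(err)
//	}
//	defer notify.Stop(c)
//	ei := <-c
//	log.Println("event:", ei.Event(), "path:", ei.Path())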
|
{
"pile_set_name": "Github"
}
|
---
{
"title": "locate",
"language": "zh-CN"
}
---
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# locate
## description
### Syntax
`INT locate(VARCHAR substr, VARCHAR str[, INT pos])`
Returns the position of the first occurrence of substr within str (counting from 1). If the third argument pos is given, the search for substr starts at offset pos within str. Returns 0 if substr is not found.
## example
```
mysql> SELECT LOCATE('bar', 'foobarbar');
+----------------------------+
| locate('bar', 'foobarbar') |
+----------------------------+
| 4 |
+----------------------------+
mysql> SELECT LOCATE('xbar', 'foobar');
+--------------------------+
| locate('xbar', 'foobar') |
+--------------------------+
| 0 |
+--------------------------+
mysql> SELECT LOCATE('bar', 'foobarbar', 5);
+-------------------------------+
| locate('bar', 'foobarbar', 5) |
+-------------------------------+
| 7 |
+-------------------------------+
```
## keyword
LOCATE
|
{
"pile_set_name": "Github"
}
|
// HomePage
import React, {PropTypes} from 'react';
import marked from 'marked';
import customMarkedRenderer from '../customMarkedRenderer';
import HomePageExampleModalButton from '../components/HomePageExampleModalButton.react';
require('../../css/HomePage.scss');
export default class HomePage extends React.Component {
render() {
return <div id="home-page">
<header>
<Container>
<h1>React Modal Dialog</h1>
<h2>A simple, idiomatic, and declarative way to launch modal dialogs in ReactJS</h2>
<HomePageExampleModalButton className="header-link"/>
<a className="header-link" href="https://github.com/qimingweng/react-modal-dialog">Github</a>
</Container>
</header>
<section className="body-section">
<Container>
<HomeContent/>
</Container>
</section>
</div>;
}
}
const markdown = require('raw!../../../../README.md');
class HomeContent extends React.Component {
render() {
return <div dangerouslySetInnerHTML={{
__html: marked.parse(markdown, {renderer: customMarkedRenderer}),
}}/>;
}
}
class Container extends React.Component {
static propTypes = {
children: PropTypes.node,
}
render() {
return <div style={{width: 600, margin: '0 auto'}}>
{this.props.children}
</div>;
}
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2015-2019 Alexandr Evstigneev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.perl5.lang.perl.idea.hierarchy.namespace.treestructures;
import com.intellij.psi.PsiElement;
import com.perl5.lang.perl.psi.PerlNamespaceDefinitionElement;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collection;
public class PerlSuperTypesHierarchyTreeStructure extends PerlSubTypesHierarchyTreeStructure {
public PerlSuperTypesHierarchyTreeStructure(@NotNull PsiElement element) {
super(element);
}
@Override
protected Collection<PsiElement> getSubElements(PsiElement element) {
assert element instanceof PerlNamespaceDefinitionElement;
return new ArrayList<>(((PerlNamespaceDefinitionElement)element).getParentNamespaceDefinitions());
}
}
|
{
"pile_set_name": "Github"
}
|
/** @file gsGaussRule.h
@brief Provides the Gauss-Legendre quadrature rule
This file is part of the G+Smo library.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
Author(s): A. Mantzaflaris
*/
#pragma once
#include <gsAssembler/gsQuadRule.h>
namespace gismo
{
/**
\brief Class that represents the (tensor) Gauss-Legendre quadrature rule
\ingroup Assembler
*/
template<class T>
class gsGaussRule GISMO_FINAL : public gsQuadRule<T>
{
public:
/// Default empty constructor
gsGaussRule() { }
/// Initialize a tensor-product Gauss quadrature rule with \a numNodes (direction-wise)
gsGaussRule(gsVector<index_t> const & numNodes,
const unsigned digits = 0 )
{
gsGaussRule::setNodes(numNodes, digits);
}
/// Initialize a 1D Gauss quadrature rule with \a numNodes
gsGaussRule(index_t numNodes, const unsigned digits = 0 )
{
this->setNodes(numNodes, digits);
}
/// Initialize a tensor-product Gauss quadrature rule for \a basis
/// using quA *deg_i + quB nodes (direction-wise)
gsGaussRule(const gsBasis<T> & basis, const T quA, const index_t quB, short_t fixDir = -1);
//const unsigned digits = std::numeric_limits<T>::digits10 );
/// Initialize a tensor-product Gauss quadrature rule for \a basis
/// using quA *deg_i + quB nodes (direction-wise). Values of quA
/// and quB are taken from the \a options
gsGaussRule(const gsBasis<T> & basis, const gsOptionList & options, short_t fixDir = -1);
//const unsigned digits = std::numeric_limits<T>::digits10 );
~gsGaussRule() { }
public:
// see gsQuadRule.h for documentation
void setNodes( gsVector<index_t> const & numNodes,
unsigned digits = 0 );
using gsQuadRule<T>::setNodes; // unhide base
private:
void init(const gsBasis<T> & basis, const T quA, const index_t quB, short_t fixDir);
/**
* @brief Computes the Gauss quadrature rule with \a n nodes in the interval [-1,1].
*
* This function is called by setNodes(), if lookupReference() (which is called first) returned \a false.
*/
static void computeReference(index_t n, gsVector<T> & x, gsVector<T> & w,
unsigned digits = 0 );
/**
 * @brief Look-up function for the Gauss quadrature rule in the interval [-1,1].
 *
 * When the member function setNodes() is called, it will first try to look up
 * the corresponding Gauss rule. If this look-up was not successful, the function computeReference() will be called.
 * \return \a true if the look-up was successful
*/
static bool lookupReference (index_t n, gsVector<T> & x, gsVector<T> & w);
}; // class gsGaussRule
} // namespace gismo
#ifndef GISMO_BUILD_LIB
#include GISMO_HPP_HEADER(gsGaussRule.hpp)
#endif
|
{
"pile_set_name": "Github"
}
|
; reads lines, prints them back when you hit 'enter'
; dies if you wait a while, because so far we never free memory
(function main [
(default-space:space-address <- new space:literal 30:literal)
(cursor-mode) ;? 1
; hook up stdin
(stdin:channel-address <- init-channel 1:literal)
(fork-helper send-keys-to-stdin:fn nil:literal/globals nil:literal/limit nil:literal/keyboard stdin:channel-address)
; buffer stdin
(buffered-stdin:channel-address <- init-channel 1:literal)
(fork-helper buffer-lines:fn nil:literal/globals nil:literal/limit stdin:channel-address buffered-stdin:channel-address)
{ begin
; now read characters from the buffer until 'enter' is typed
(s:string-address <- new "? ")
(print-string nil:literal/terminal s:string-address)
{ begin
(x:tagged-value buffered-stdin:channel-address/deref <- read buffered-stdin:channel-address)
(c:character <- maybe-coerce x:tagged-value character:literal)
;? ($print (("AAA " literal))) ;? 1
;? ($print c:character) ;? 1
;? ($print (("\n" literal))) ;? 1
(print-character nil:literal/terminal c:character)
(line-done?:boolean <- equal c:character ((#\newline literal)))
(loop-unless line-done?:boolean)
}
(loop)
}
])
|
{
"pile_set_name": "Github"
}
|
#-*-coding:utf-8-*-
import copy
from .func import xor, rotl, get_uint32_be, put_uint32_be, \
bytes_to_list, list_to_bytes, padding, unpadding
#Expanded SM4 box table
SM4_BOXES_TABLE = [
0xd6,0x90,0xe9,0xfe,0xcc,0xe1,0x3d,0xb7,0x16,0xb6,0x14,0xc2,0x28,0xfb,0x2c,
0x05,0x2b,0x67,0x9a,0x76,0x2a,0xbe,0x04,0xc3,0xaa,0x44,0x13,0x26,0x49,0x86,
0x06,0x99,0x9c,0x42,0x50,0xf4,0x91,0xef,0x98,0x7a,0x33,0x54,0x0b,0x43,0xed,
0xcf,0xac,0x62,0xe4,0xb3,0x1c,0xa9,0xc9,0x08,0xe8,0x95,0x80,0xdf,0x94,0xfa,
0x75,0x8f,0x3f,0xa6,0x47,0x07,0xa7,0xfc,0xf3,0x73,0x17,0xba,0x83,0x59,0x3c,
0x19,0xe6,0x85,0x4f,0xa8,0x68,0x6b,0x81,0xb2,0x71,0x64,0xda,0x8b,0xf8,0xeb,
0x0f,0x4b,0x70,0x56,0x9d,0x35,0x1e,0x24,0x0e,0x5e,0x63,0x58,0xd1,0xa2,0x25,
0x22,0x7c,0x3b,0x01,0x21,0x78,0x87,0xd4,0x00,0x46,0x57,0x9f,0xd3,0x27,0x52,
0x4c,0x36,0x02,0xe7,0xa0,0xc4,0xc8,0x9e,0xea,0xbf,0x8a,0xd2,0x40,0xc7,0x38,
0xb5,0xa3,0xf7,0xf2,0xce,0xf9,0x61,0x15,0xa1,0xe0,0xae,0x5d,0xa4,0x9b,0x34,
0x1a,0x55,0xad,0x93,0x32,0x30,0xf5,0x8c,0xb1,0xe3,0x1d,0xf6,0xe2,0x2e,0x82,
0x66,0xca,0x60,0xc0,0x29,0x23,0xab,0x0d,0x53,0x4e,0x6f,0xd5,0xdb,0x37,0x45,
0xde,0xfd,0x8e,0x2f,0x03,0xff,0x6a,0x72,0x6d,0x6c,0x5b,0x51,0x8d,0x1b,0xaf,
0x92,0xbb,0xdd,0xbc,0x7f,0x11,0xd9,0x5c,0x41,0x1f,0x10,0x5a,0xd8,0x0a,0xc1,
0x31,0x88,0xa5,0xcd,0x7b,0xbd,0x2d,0x74,0xd0,0x12,0xb8,0xe5,0xb4,0xb0,0x89,
0x69,0x97,0x4a,0x0c,0x96,0x77,0x7e,0x65,0xb9,0xf1,0x09,0xc5,0x6e,0xc6,0x84,
0x18,0xf0,0x7d,0xec,0x3a,0xdc,0x4d,0x20,0x79,0xee,0x5f,0x3e,0xd7,0xcb,0x39,
0x48,
]
# System parameter
SM4_FK = [0xa3b1bac6,0x56aa3350,0x677d9197,0xb27022dc]
# fixed parameter
SM4_CK = [
0x00070e15,0x1c232a31,0x383f464d,0x545b6269,
0x70777e85,0x8c939aa1,0xa8afb6bd,0xc4cbd2d9,
0xe0e7eef5,0xfc030a11,0x181f262d,0x343b4249,
0x50575e65,0x6c737a81,0x888f969d,0xa4abb2b9,
0xc0c7ced5,0xdce3eaf1,0xf8ff060d,0x141b2229,
0x30373e45,0x4c535a61,0x686f767d,0x848b9299,
0xa0a7aeb5,0xbcc3cad1,0xd8dfe6ed,0xf4fb0209,
0x10171e25,0x2c333a41,0x484f565d,0x646b7279
]
SM4_ENCRYPT = 0
SM4_DECRYPT = 1
class CryptSM4(object):
def __init__(self, mode=SM4_ENCRYPT):
self.sk = [0]*32
self.mode = mode
    # Calculates the round encryption keys.
    # args:    [in] ka: a 32-bit unsigned value;
    # return: sk[i]: i in {0, 1, 2, 3, ..., 31}.
@classmethod
def _round_key(cls, ka):
b = [0, 0, 0, 0]
a = put_uint32_be(ka)
b[0] = SM4_BOXES_TABLE[a[0]]
b[1] = SM4_BOXES_TABLE[a[1]]
b[2] = SM4_BOXES_TABLE[a[2]]
b[3] = SM4_BOXES_TABLE[a[3]]
bb = get_uint32_be(b[0:4])
rk = bb ^ (rotl(bb, 13)) ^ (rotl(bb, 23))
return rk
# Calculating and getting encryption/decryption contents.
# args: [in] x0: original contents;
# args: [in] x1: original contents;
# args: [in] x2: original contents;
# args: [in] x3: original contents;
# args: [in] rk: encryption/decryption key;
    # return: the encrypted/decrypted contents.
@classmethod
def _f(cls, x0, x1, x2, x3, rk):
# "T algorithm" == "L algorithm" + "t algorithm".
        # args:    [in] ka: a 32-bit unsigned value;
        # return: c: c is calculated with the linear algorithm "L" and the nonlinear algorithm "t"
def _sm4_l_t(ka):
b = [0, 0, 0, 0]
a = put_uint32_be(ka)
b[0] = SM4_BOXES_TABLE[a[0]]
b[1] = SM4_BOXES_TABLE[a[1]]
b[2] = SM4_BOXES_TABLE[a[2]]
b[3] = SM4_BOXES_TABLE[a[3]]
bb = get_uint32_be(b[0:4])
c = bb ^ (rotl(bb, 2)) ^ (rotl(bb, 10)) ^ (rotl(bb, 18)) ^ (rotl(bb, 24))
return c
return (x0 ^ _sm4_l_t(x1 ^ x2 ^ x3 ^ rk))
def set_key(self, key, mode):
key = bytes_to_list(key)
MK = [0, 0, 0, 0]
k = [0]*36
MK[0] = get_uint32_be(key[0:4])
MK[1] = get_uint32_be(key[4:8])
MK[2] = get_uint32_be(key[8:12])
MK[3] = get_uint32_be(key[12:16])
k[0:4] = xor(MK[0:4], SM4_FK[0:4])
for i in range(32):
k[i + 4] = k[i] ^ (
self._round_key(k[i + 1] ^ k[i + 2] ^ k[i + 3] ^ SM4_CK[i]))
self.sk[i] = k[i + 4]
self.mode = mode
if mode == SM4_DECRYPT:
for idx in range(16):
t = self.sk[idx]
self.sk[idx] = self.sk[31 - idx]
self.sk[31 - idx] = t
def one_round(self, sk, in_put):
out_put = []
ulbuf = [0]*36
ulbuf[0] = get_uint32_be(in_put[0:4])
ulbuf[1] = get_uint32_be(in_put[4:8])
ulbuf[2] = get_uint32_be(in_put[8:12])
ulbuf[3] = get_uint32_be(in_put[12:16])
for idx in range(32):
ulbuf[idx + 4] = self._f(ulbuf[idx], ulbuf[idx + 1], ulbuf[idx + 2], ulbuf[idx + 3], sk[idx])
out_put += put_uint32_be(ulbuf[35])
out_put += put_uint32_be(ulbuf[34])
out_put += put_uint32_be(ulbuf[33])
out_put += put_uint32_be(ulbuf[32])
return out_put
def crypt_ecb(self, input_data):
# SM4-ECB block encryption/decryption
input_data = bytes_to_list(input_data)
if self.mode == SM4_ENCRYPT:
input_data = padding(input_data)
length = len(input_data)
i = 0
output_data = []
while length > 0:
output_data += self.one_round(self.sk, input_data[i:i+16])
i += 16
length -= 16
if self.mode == SM4_DECRYPT:
return list_to_bytes(unpadding(output_data))
return list_to_bytes(output_data)
def crypt_cbc(self, iv, input_data):
#SM4-CBC buffer encryption/decryption
i = 0
output_data = []
tmp_input = [0]*16
iv = bytes_to_list(iv)
if self.mode == SM4_ENCRYPT:
input_data = padding(bytes_to_list(input_data))
length = len(input_data)
while length > 0:
tmp_input[0:16] = xor(input_data[i:i+16], iv[0:16])
output_data += self.one_round(self.sk, tmp_input[0:16])
iv = copy.deepcopy(output_data[i:i+16])
i += 16
length -= 16
return list_to_bytes(output_data)
else:
length = len(input_data)
while length > 0:
output_data += self.one_round(self.sk, input_data[i:i+16])
output_data[i:i+16] = xor(output_data[i:i+16], iv[0:16])
iv = copy.deepcopy(input_data[i:i + 16])
i += 16
length -= 16
return list_to_bytes(unpadding(output_data))
|
{
"pile_set_name": "Github"
}
|
<?xml version='1.0' ?>
<math display="block" xmlns="http://www.w3.org/1998/Math/MathML">
<mrow>
<mrow>
<mo stretchy="true" form="prefix">(</mo>
<mrow>
<mi>x</mi>
<mo>,</mo>
<mfrac>
<mi>y</mi>
<mi>z</mi>
</mfrac>
</mrow>
<mo stretchy="true" form="postfix">)</mo>
</mrow>
<mo>+</mo>
<mrow>
<mo stretchy="true" form="prefix">(</mo>
<mrow>
<mi>x</mi>
<mo>,</mo>
<mfrac>
<mi>y</mi>
<mi>z</mi>
</mfrac>
</mrow>
<mo stretchy="true" form="postfix">)</mo>
</mrow>
<mo>+</mo>
<mrow>
<mo stretchy="true" form="prefix">(</mo>
<mrow>
<mi>x</mi>
<mo>,</mo>
<mfrac>
<mi>y</mi>
<mi>z</mi>
</mfrac>
</mrow>
<mo stretchy="true" form="postfix">)</mo>
</mrow>
</mrow>
</math>
|
{
"pile_set_name": "Github"
}
|
The following constants are meant for use with `fs.open()`.
<table>
<tr>
<th>Constant</th>
<th>Description</th>
</tr>
<tr>
<td><code>O_RDONLY</code></td>
<td>Indicates that the file is opened for read-only access.</td>
</tr>
<tr>
<td><code>O_WRONLY</code></td>
<td>Indicates that the file is opened for write-only access.</td>
</tr>
<tr>
<td><code>O_RDWR</code></td>
<td>Indicates that the file is opened for read-write access.</td>
</tr>
<tr>
<td><code>O_CREAT</code></td>
<td>Indicates that the file is created if it does not already exist.</td>
</tr>
<tr>
<td><code>O_EXCL</code></td>
<td>Indicates that opening the file should fail if the <code>O_CREAT</code> flag is set and the file already exists.</td>
</tr>
<tr>
<td><code>O_NOCTTY</code></td>
<td>Indicates that if the path identifies a terminal device, opening the path shall not cause that terminal to become the controlling terminal for the process (if the process does not already have one).</td>
</tr>
<tr>
<td><code>O_TRUNC</code></td>
<td>Indicates that if the file exists, is a regular file, and is opened successfully for write access, its length shall be truncated to zero.</td>
</tr>
<tr>
<td><code>O_APPEND</code></td>
<td>Indicates that data will be appended to the end of the file.</td>
</tr>
<tr>
<td><code>O_DIRECTORY</code></td>
<td>Indicates that the open should fail if the path is not a directory.</td>
</tr>
<tr>
<td><code>O_NOATIME</code></td>
<td>Indicates that read accesses to the file system will no longer result in an update to the <code>atime</code> information associated with the file.
Available only on Linux operating systems.</td>
</tr>
<tr>
<td><code>O_NOFOLLOW</code></td>
<td>Indicates that the open should fail if the path is a symbolic link.</td>
</tr>
<tr>
<td><code>O_SYNC</code></td>
<td>Indicates that the file is opened for synchronized I/O, with write operations waiting for file integrity.</td>
</tr>
<tr>
<td><code>O_DSYNC</code></td>
<td>Indicates that the file is opened for synchronized I/O, with write operations waiting for data integrity.</td>
</tr>
<tr>
<td><code>O_SYMLINK</code></td>
<td>Indicates that the symbolic link itself is opened rather than the resource it points to.</td>
</tr>
<tr>
<td><code>O_DIRECT</code></td>
<td>Indicates that attempts will be made to minimize the caching effects of file I/O.</td>
</tr>
<tr>
<td><code>O_NONBLOCK</code></td>
<td>Indicates that the file is opened in non-blocking mode when possible.</td>
</tr>
<tr>
<td><code>UV_FS_O_FILEMAP</code></td>
<td>When set, a memory file mapping is used to access the file.
This flag is available only on Windows operating systems.
On other operating systems, this flag is ignored.</td>
</tr>
</table>
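As a quick, hedged illustration (not part of the original documentation), the sketch below shows one common way these constants are combined with a bitwise OR when calling `fs.open()`. The file name, the written text, and the error handling are placeholder choices, not prescribed usage.
```ts
import * as fs from 'fs';

const { O_WRONLY, O_CREAT, O_APPEND } = fs.constants;

// Open 'app.log' (creating it if it does not exist) for append-only writes.
fs.open('app.log', O_WRONLY | O_CREAT | O_APPEND, (err, fd) => {
  if (err) throw err;
  fs.write(fd, 'hello\n', (writeErr) => {
    // Always release the file descriptor, then surface any write error.
    fs.close(fd, () => {});
    if (writeErr) throw writeErr;
  });
});
```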
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_PLOT_PYTHON_H_
#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_PLOT_PYTHON_H_
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
namespace webrtc {
class PythonPlot final : public Plot {
public:
PythonPlot();
~PythonPlot() override;
void Draw() override;
};
class PythonPlotCollection final : public PlotCollection {
public:
explicit PythonPlotCollection(bool shared_xaxis = false);
~PythonPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
private:
bool shared_xaxis_;
};
} // namespace webrtc
#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_PLOT_PYTHON_H_
|
{
"pile_set_name": "Github"
}
|
/**************************************************************************//**
* @file library/Device/Holtek/HT32F5xxxx/Source/system_ht32f5xxxx_06.c
* @brief CMSIS Cortex-M0+ Device Peripheral Access Layer Source File
* for the Holtek HT32F5xxxx Device Series
* @version $Rev:: 3993 $
* @date $Date:: 2019-07-02 #$
*
* @note
* Copyright (C) Holtek Semiconductor Inc. All rights reserved.
*
* @par
* ARM Limited (ARM) supplies this software for Cortex-M processor-based
* microcontrollers. This file can be freely distributed within development
* tools that are supporting such ARM based processors.
*
* @par
* THIS SOFTWARE IS PROVIDED "AS IS". NO WARRANTIES, WHETHER EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE APPLY TO THIS SOFTWARE.
* ARM SHALL NOT, IN ANY CIRCUMSTANCES, BE LIABLE FOR SPECIAL, INCIDENTAL, OR
* CONSEQUENTIAL DAMAGES, FOR ANY REASON WHATSOEVER.
*
******************************************************************************/
// Supported Device
// ========================================
// HT32F50343
//#define USE_HT32F50343
/** @addtogroup CMSIS
* @{
*/
/** @addtogroup HT32F5xxxx_system HT32F5xxxx System
* @{
*/
#include "ht32f5xxxx_01.h"
/** @addtogroup HT32F5xxxx_System_Private_Defines
* @{
*/
/*
//-------- <<< Use Configuration Wizard in Context Menu >>> ------------------
*/
/*--------------------- Clock Configuration ----------------------------------
//
// <q0> Enable High Speed Internal RC Oscillator (HSI)
// <i> Default HSI = ENABLE
//
// <q1> Enable High Speed External Crystal Oscillator (HSE)
// <i> Default HSE = DISABLE
//
// <q3> Enable Low Speed External Crystal Oscillator (LSE)
// <i> Default LSE = DISABLE
//
// <e4> Enable PLL
// <i> Default PLL = DISABLE
// <o5> PLL Clock Source
// <0=> CK_HSE
// <1=> CK_HSI
// <i> Default PLL clock source = CK_HSI
// <i> PLL source clock must be in the range of 4 MHz to 16 MHz
// <o6> PLL Clock Source Divider
// <0=> 1
// <1=> 2
// <i> Default PLL clock source divider = 1
// <o7> PLL Feedback Clock Divider (NF2): 1 ~ 16
// <1-16:1>
// <i> PLL feedback clock = PLL clock source x NF2
// <i> PLL feedback clock must be in the range of 24 MHz to 60 MHz
// <o8> PLL Output Clock Divider (NO2)
// <0=> 1
// <1=> 2
// <2=> 4
// <3=> 8
// <i> PLL output clock = PLL feedback clock / NO2
// <i> PLL output clock must be in the range of 4 MHz to 60 MHz
// </e>
//
// <h> SystemCoreClock Configuration (CK_AHB)
// <o9> SystemCoreClock Source
// <1=> CK_PLL
// <2=> CK_HSE
// <3=> CK_HSI
// <6=> CK_LSE
// <7=> CK_LSI
// <i> Default SystemCoreClock source = CK_HSI
// <o10> SystemCoreClock Source Divider
// <0=> 1
// <1=> 2
// <2=> 4
// <3=> 8
// <4=> 16
// <5=> 32
// <i> Default SystemCoreClock source divider = 1
// </h>
//
// <h> FLASH Configuration
// <o11> Wait state
// <0=> 0 WS
// <1=> 1 WS
// <2=> 2 WS
// <9=> AUTO
// <i> 0 WS: 1 kHz <= CK_AHB <= 20 MHz
// <i> 1 WS: 20 MHz < CK_AHB <= 40 MHz
// <i> 2 WS: 40 MHz < CK_AHB <= 60 MHz
// <q12> Pre-fetch Buffer Enable
// <i> Default pre-fetch buffer = ENABLE
// </h>
*/
#define HSI_ENABLE (1) /*!< 0: DISABLE, 1: ENABLE */
#define HSE_ENABLE (1) /*!< 0: DISABLE, 1: ENABLE */
#define LSI_ENABLE (1) /*!< 0: DISABLE, 1: ENABLE */
#define LSE_ENABLE (0) /*!< 0: DISABLE, 1: ENABLE */
#define PLL_ENABLE (1) /*!< 0: DISABLE, 1: ENABLE */
#define PLL_CLK_SRC (0) /*!< 0: HSE, 1: HSI */
#define PLL_CLK_SRC_DIV (1) /*!< 0: DIV1, 1: DIV2 */
#define PLL_NF2_DIV (15) /*!< 1~16: DIV1~DIV16 */
#define PLL_NO2_DIV (0) /*!< 0: DIV1, 1: DIV2, 2: DIV4, 3: DIV8 */
#define HCLK_SRC (1) /*!< 0: PLL, 1: PLL, 2: HSE, 3: HSI 6: LSE, 7: LSI */
#define HCLK_DIV (0) /*!< 0: DIV1, 1: DIV2, 2: DIV4, 3: DIV8, 4: DIV16, 5: DIV32 */
#define WAIT_STATE (9) /*!< 0: WS = 0, 1: WS = 1, 2: WS = 2, 9: AUTO */
#define PRE_FETCH_ENABLE (1) /*!< 0: DISABLE, 1: ENABLE */
#define DEINIT_ENABLE (1) /* Set 0 for reduce code size */
/*--------------------- WDT Configuration ----------------------------------
//
// <e0> Enable WDT Configuration
// <o1> WDT Prescaler Selection
// <0=> CK_WDT / 1
// <1=> CK_WDT / 2
// <2=> CK_WDT / 4
// <3=> CK_WDT / 8
// <4=> CK_WDT / 16
// <5=> CK_WDT / 32
// <6=> CK_WDT / 64
// <7=> CK_WDT / 128
// <o2> WDT Reload Value <1-4095:1>
// <q3> Enable WDT Reset function
// <o4> WDT Sleep Halt mode
// <0=> No halt
// <1=> Halt in DeepSleep1
// <2=> Halt in Sleep & DeepSleep1
// </e>
*/
#define WDT_ENABLE (0) /*!< 0: DISABLE, 1: ENABLE */
#define WDT_PRESCALER (5) /*!< 0: 1/1, 1: 1/2, 2: 1/4, 3: 1/8, 4: 1/16, 5: 1/32, 6: 1/64, 7: 1/128 */
#define WDT_RELOAD (2000) /*!< 0 ~ 4095, 12 bit */
#define WDT_RESET_ENABLE (1) /*!< 0: No Reset, 1: Reset when WDT over flow */
#define WDT_SLEEP_HALT (2) /*!< 0: No halt, 1: Halt in DeepSleep1, 2: Halt in Sleep & DeepSleep1 */
/**
* @brief Check HSI frequency
*/
#if (HSI_VALUE != 8000000UL)
#error "CK_HSI clock issue: must be 8 MHz!"
#endif
/**
* @brief Check HSE frequency
*/
#if ((HSE_VALUE < 4000000UL) || (HSE_VALUE > 16000000UL))
#error "CK_HSE clock issue: must be in the range of 4 MHz to 16 MHz!"
#endif
/**
* @brief Check LSI frequency
*/
#if (LSI_VALUE != 32000UL)
#error "CK_LSI clock issue: must be 32 kHz!"
#endif
/**
* @brief Check LSE frequency
*/
#if (LSE_VALUE != 32768UL)
#error "CK_LSE clock issue: must be 32.768 kHz!"
#endif
/**
* @brief CK_PLL definition
*/
#if (PLL_ENABLE == 1)
/* Get CK_VCO frequency */
#if (PLL_CLK_SRC == 1)
#if (HSI_ENABLE == 0)
#error "CK_PLL clock source issue: HSI has not been enabled"
#else
#define __CK_VCO ((HSI_VALUE >> PLL_CLK_SRC_DIV) * PLL_NF2_DIV) /*!< Select HSI as PLL source */
#endif
#else
#if (HSE_ENABLE == 0)
#error "CK_PLL clock source issue: HSE has not been enabled!"
#else
#define __CK_VCO ((HSE_VALUE >> PLL_CLK_SRC_DIV) * PLL_NF2_DIV) /*!< Select HSE as PLL source */
#endif
#endif
#define VCO_MIN 24000000UL
#define VCO_MAX 60000000UL
#define PLL_MIN 4000000UL
#define PLL_MAX 60000000UL
/* Check CK_VCO frequency */
#if ((__CK_VCO < VCO_MIN) || (__CK_VCO > VCO_MAX))
#error "CK_VCO clock issue: must be in the range!"
#endif
#define __CK_PLL (__CK_VCO >> PLL_NO2_DIV) /*!< Get CK_PLL frequency */
/* Check CK_PLL frequency */
#if ((__CK_PLL < PLL_MIN) || (__CK_PLL > PLL_MAX))
#error "CK_PLL clock issue: must be in the range!"
#endif
#endif
/**
* @brief CK_SYS definition
*/
#if (HCLK_SRC == 1)
#if (PLL_ENABLE == 1)
#define __CK_SYS __CK_PLL /*!< Select PLL as CK_SYS source */
#else
#error "CK_SYS clock source issue: PLL is not enable!"
#endif
#elif (HCLK_SRC == 2)
#if (HSE_ENABLE == 1)
#define __CK_SYS HSE_VALUE /*!< Select HSE as CK_SYS source */
#else
#error "CK_SYS clock source issue: HSE is not enable!"
#endif
#elif (HCLK_SRC == 3)
#if (HSI_ENABLE == 1)
#define __CK_SYS HSI_VALUE /*!< Select HSI as CK_SYS source */
#else
#error "CK_SYS clock source issue: HSI is not enable!"
#endif
#elif (HCLK_SRC == 6)
#if (LSE_ENABLE == 1)
#define __CK_SYS LSE_VALUE /*!< Select LSE as CK_SYS source */
#else
#error "CK_SYS clock source issue: LSE is not enable!"
#endif
#elif (HCLK_SRC == 7)
#if (LSI_ENABLE == 1)
#define __CK_SYS LSI_VALUE /*!< Select LSI as CK_SYS source */
#else
#error "CK_SYS clock source issue: LSI is not enable!"
#endif
#else
#error "CK_SYS clock source issue: No clock source is selected!"
#endif
/**
* @brief CK_AHB definition
*/
#define __CK_AHB (__CK_SYS >> HCLK_DIV) /*!< Get CK_AHB frequency */
#define CKAHB_MIN 1000UL
#define CKAHB_MAX 60000000UL
#define WS0_CLK 20000000UL
#define WS1_CLK 40000000UL
/* Check CK_AHB frequency */
#if ((__CK_AHB < CKAHB_MIN) || (__CK_AHB > CKAHB_MAX))
#error "CK_AHB clock issue: must be in the range!"
#endif
/* Check FLASH wait-state setting */
#if ((__CK_AHB > WS1_CLK) && (WAIT_STATE < 2) || \
(__CK_AHB > WS0_CLK) && (WAIT_STATE < 1))
#error "FLASH wait state configuration issue!"
#endif
/**
* @}
*/
/** @addtogroup HT32F5xxxx_System_Private_Variables
* @{
*/
__IO uint32_t SystemCoreClock = __CK_AHB; /*!< SystemCoreClock = CK_AHB */
/**
* @}
*/
/** @addtogroup HT32F5xxxx_System_Private_Functions
* @{
*/
/**
* @brief Setup the microcontroller system.
* Initializes the system clocks and the embedded Flash.
* @note This function should be used after reset.
* @retval None
*/
void SystemInit(void)
{
#if (WDT_ENABLE == 1)
HT_CKCU->APBCCR1 |= (0x1 << 4);
HT_WDT->PR = 0x35CA;
HT_WDT->MR1 = ((HT_WDT->MR1 & 0xFFF) | (WDT_PRESCALER << 12));
HT_WDT->MR0 = WDT_RELOAD | (WDT_RESET_ENABLE << 13) | (WDT_SLEEP_HALT << 14) | (0x1 << 16);
HT_WDT->CR = 0x5FA00001;
#else
#if (DEINIT_ENABLE == 1)
HT_RSTCU->APBPRST1 = (1 << 4);
#endif
#endif
SetBit_BB((u32)(&HT_CKCU->APBCCR1), 6); /* enable Backup domain register clock */
#if (DEINIT_ENABLE == 1)
/* De-init the setting */
HT_CKCU->AHBCCR &= ~(0x3 << 10); /* disable IP who may use PLL as source */
SetBit_BB((u32)(&HT_CKCU->GCCR), 11); /* enable HSI */
while (!GetBit_BB((u32)(&HT_CKCU->GCSR), 3)); /* wait for HSI ready */
HT_CKCU->GCCR = ((HT_CKCU->GCCR & ~7UL) | 3UL); /* select CK_SYS source */
while ((HT_CKCU->CKST & 7UL) != 3UL); /* wait for clock switch complete */
HT_FLASH->CFCR = (((HT_FLASH->CFCR) & ~7UL) | 1UL); /* set Wait State as 0 WS */
HT_CKCU->AHBCFGR = 0; /* set CK_AHB prescaler */
ResetBit_BB((u32)(&HT_CKCU->GCCR), 9); /* disable PLL */
SetBit_BB((u32)(&HT_CKCU->GCFGR), 8); /* select PLL source as HSI */
ResetBit_BB((u32)(&HT_CKCU->GCCR), 3); /* disable USB PLL */
SetBit_BB((u32)(&HT_CKCU->GCFGR), 9); /* select USB PLL source as HSI */
#endif
/* HSE initiation */
#if (HSE_ENABLE == 1)
SetBit_BB((u32)(&HT_CKCU->GCCR), 10); /* enable HSE */
while (!GetBit_BB((u32)(&HT_CKCU->GCSR), 2)); /* wait for HSE ready */
#endif
/* LSE initiation */
#if (LSE_ENABLE == 1)
do {
SetBit_BB((u32)(&HT_RTC->CR), 3); /* enable LSE */
} while (!GetBit_BB((u32)(&HT_RTC->CR), 3));
while (!GetBit_BB((u32)(&HT_CKCU->GCSR), 4)); /* wait for LSE ready */
#endif
ResetBit_BB((u32)(&HT_CKCU->APBCCR1), 6); /* disable Backup domain register clock */
/* LSI initiation */
#if (HCLK_SRC == 7)
while (!GetBit_BB((u32)(&HT_CKCU->GCSR), 5)); /* wait for LSI ready */
#endif
/* PLL initiation */
#if (PLL_ENABLE == 1)
HT_CKCU->PLLCFGR = ((PLL_NF2_DIV & 0x0F) << 23) | (PLL_NO2_DIV << 21); /* set PLL divider */
#if (PLL_CLK_SRC_DIV == 1)
SetBit_BB((u32)(&HT_CKCU->PLLCFGR), 28); /* set PLL clock source divider */
#else
ResetBit_BB((u32)(&HT_CKCU->PLLCFGR), 28); /* reset PLL clock source divider */
#endif
#if (PLL_CLK_SRC == 0)
ResetBit_BB((u32)(&HT_CKCU->GCFGR), 8); /* select PLL source as HSE */
#else
SetBit_BB((u32)(&HT_CKCU->GCFGR), 8); /* select PLL source as HSI */
#endif
SetBit_BB((u32)(&HT_CKCU->GCCR), 9); /* enable PLL */
while (!GetBit_BB((u32)(&HT_CKCU->GCSR), 1)){}; /* wait for PLL ready */
#endif
/* CK_AHB initiation */
#if (WAIT_STATE == 9)
#if (__CK_AHB > WS1_CLK)
HT_FLASH->CFCR = (((HT_FLASH->CFCR) & ~7UL) | 3UL); /* auto-select wait state */
#elif (__CK_AHB > WS0_CLK)
HT_FLASH->CFCR = (((HT_FLASH->CFCR) & ~7UL) | 2UL); /* auto-select wait state */
#endif
#else
HT_FLASH->CFCR = (((HT_FLASH->CFCR) & ~7UL) | (WAIT_STATE + 1)); /* manual wait state */
#endif
HT_CKCU->AHBCFGR = HCLK_DIV; /* set CK_AHB prescaler */
HT_CKCU->GCCR = ((HT_CKCU->GCCR & ~7UL) | HCLK_SRC); /* select CK_SYS source */
while ((HT_CKCU->CKST & 7UL) != HCLK_SRC); /* wait for clock switch complete */
/* Pre-fetch buffer configuration */
#if (PRE_FETCH_ENABLE == 0)
ResetBit_BB((u32)(&HT_FLASH->CFCR), 4); /* 0: pre-fetch disable, 1: pre-fetch enable */
#else
SetBit_BB((u32)(&HT_FLASH->CFCR), 4); /* 0: pre-fetch disable, 1: pre-fetch enable */
#endif
/* HSE power down */
#if ((HSE_ENABLE == 0) && (HCLK_SRC != 2) && ((PLL_ENABLE == 0) || (PLL_CLK_SRC == 1)))
ResetBit_BB((u32)(&HT_CKCU->GCCR), 10);
#endif
/* HSI power down */
#if ((HSI_ENABLE == 0) && (HCLK_SRC != 3) && ((PLL_ENABLE == 0) || (PLL_CLK_SRC == 0)))
ResetBit_BB((u32)(&HT_CKCU->GCCR), 11);
#endif
}
/**
* @brief Update SystemCoreClock
* @retval None
*/
void SystemCoreClockUpdate(void)
{
u32 SystemCoreClockDiv = HT_CKCU->AHBCFGR & 7UL;
u32 PllSourceClockDiv = (HT_CKCU->PLLCFGR >> 28) & 1UL;
u32 PllFeedbackClockDiv = (((HT_CKCU->PLLCFGR >> 23) & 15UL) == 0) ? (16) : ((HT_CKCU->PLLCFGR >> 23) & 15UL);
u32 PllOutputClockDiv = (HT_CKCU->PLLCFGR >> 21) & 3UL;
u32 SystemCoreClockSrc = HT_CKCU->CKST & 7UL;
/* Get system core clock according to global clock control & configuration registers */
if (SystemCoreClockSrc == 1)
{
if (GetBit_BB((u32)(&HT_CKCU->PLLCR), 31))
{
PllFeedbackClockDiv = 1;
PllOutputClockDiv = 0;
}
if (GetBit_BB((u32)(&HT_CKCU->GCFGR), 8))
{
SystemCoreClock = (((HSI_VALUE >> PllSourceClockDiv) * PllFeedbackClockDiv) >> PllOutputClockDiv) >> SystemCoreClockDiv;
}
else
{
SystemCoreClock = (((HSE_VALUE >> PllSourceClockDiv) * PllFeedbackClockDiv) >> PllOutputClockDiv) >> SystemCoreClockDiv;
}
}
else if (SystemCoreClockSrc == 2)
{
SystemCoreClock = HSE_VALUE >> SystemCoreClockDiv;
}
else if (SystemCoreClockSrc == 3)
{
SystemCoreClock = HSI_VALUE >> SystemCoreClockDiv;
}
else if (SystemCoreClockSrc == 6)
{
SystemCoreClock = LSE_VALUE >> SystemCoreClockDiv;
}
else if (SystemCoreClockSrc == 7)
{
SystemCoreClock = LSI_VALUE >> SystemCoreClockDiv;
}
}
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
/******************* (C) COPYRIGHT Holtek Semiconductor Inc. *****END OF FILE*** */
|
{
"pile_set_name": "Github"
}
|
---
id: bd7198d8c242eddfaeb5bd13
title: Show National Contiguity with a Force Directed Graph
challengeType: 3
videoUrl: ''
localeTitle: 用力导向图显示全国连续性
---
## Description
<section id="description"> <strong>Objective:</strong> Build a <a href="https://codepen.io" target="_blank">CodePen.io</a> app that is functionally similar to this one: <a href="https://codepen.io/freeCodeCamp/full/xVopBo" target="_blank">https://codepen.io/freeCodeCamp/full/xVopBo</a>. Fulfill the <a href="https://en.wikipedia.org/wiki/User_story" target="_blank">user stories</a> below. Use whichever libraries or APIs you need. Give it your own personal style. <strong>User Story:</strong> I can see a force-directed graph that shows which countries share borders. <strong>User Story:</strong> I can see each country's flag on its node. <strong>Hint:</strong> Here is a dataset you can use to build this: <a href="https://raw.githubusercontent.com/DealPete/forceDirected/master/countries.json" target="_blank">https://raw.githubusercontent.com/DealPete/forceDirected/master/countries.json</a> <strong>Hint:</strong> You can create a spritesheet of national flags at <a href="https://www.flag-sprites.com" target="_blank">https://www.flag-sprites.com</a>. Remember to use <a href="https://forum.freecodecamp.org/t/how-to-get-help-when-you-are-stuck-coding/19514" target="_blank">Read-Search-Ask</a> if you get stuck. When you are finished, click the "I've completed this challenge" button and include a link to your CodePen. You can get feedback on your project by sharing it with your friends on Facebook. </section>
## Instructions
<section id="instructions">
</section>
## Tests
<section id='tests'>
```yml
tests: []
```
</section>
## Challenge Seed
<section id='challengeSeed'>
</section>
## Solution
<section id='solution'>
```js
// solution required
```
</section>
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_45) on Mon Nov 14 12:37:05 UTC 2016 -->
<meta http-equiv="Content-Type" content="text/html" charset="utf-8">
<title>ScalaToolChain (Gradle API 3.2)</title>
<meta name="date" content="2016-11-14">
<link rel="stylesheet" type="text/css" href="../../../../../javadoc.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ScalaToolChain (Gradle API 3.2)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li>Next Class</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/gradle/language/scala/toolchain/ScalaToolChain.html" target="_top">Frames</a></li>
<li><a href="ScalaToolChain.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li>Method</li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li>Method</li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.gradle.language.scala.toolchain</div>
<h2 title="Interface ScalaToolChain" class="title">Interface ScalaToolChain</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Superinterfaces:</dt>
<dd><a href="../../../../../org/gradle/api/Named.html" title="interface in org.gradle.api">Named</a>, <a href="../../../../../org/gradle/platform/base/ToolChain.html" title="interface in org.gradle.platform.base">ToolChain</a></dd>
</dl>
<hr>
<br>
<pre><a href="../../../../../org/gradle/api/Incubating.html" title="annotation in org.gradle.api">@Incubating</a>
public interface <span class="strong">ScalaToolChain</span>
extends <a href="../../../../../org/gradle/platform/base/ToolChain.html" title="interface in org.gradle.platform.base">ToolChain</a></pre>
<div class="block">A set of tools for building Scala applications</div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== NESTED CLASS SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="nested_class_summary">
<!-- -->
</a>
<h3>Nested Class Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="nested_classes_inherited_from_class_org.gradle.api.Named">
<!-- -->
</a>
<h3>Nested classes/interfaces inherited from interface org.gradle.api.<a href="../../../../../org/gradle/api/Named.html" title="interface in org.gradle.api">Named</a></h3>
<code><a href="../../../../../org/gradle/api/Named.Namer.html" title="class in org.gradle.api">Named.Namer</a></code></li>
</ul>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_org.gradle.platform.base.ToolChain">
<!-- -->
</a>
<h3>Methods inherited from interface org.gradle.platform.base.<a href="../../../../../org/gradle/platform/base/ToolChain.html" title="interface in org.gradle.platform.base">ToolChain</a></h3>
<code><a href="../../../../../org/gradle/platform/base/ToolChain.html#getDisplayName()">getDisplayName</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods_inherited_from_class_org.gradle.api.Named">
<!-- -->
</a>
<h3>Methods inherited from interface org.gradle.api.<a href="../../../../../org/gradle/api/Named.html" title="interface in org.gradle.api">Named</a></h3>
<code><a href="../../../../../org/gradle/api/Named.html#getName()">getName</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Class</li>
<li>Next Class</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/gradle/language/scala/toolchain/ScalaToolChain.html" target="_top">Frames</a></li>
<li><a href="ScalaToolChain.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li>Method</li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li>Method</li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
/* @flow */
/**
* Explode and normalize a pattern into its name and range.
*/
export function normalizePattern(
pattern: string,
): {
hasVersion: boolean,
name: string,
range: string,
} {
let hasVersion = false;
let range = 'latest';
let name = pattern;
// if we're a scope then remove the @ and add it back later
let isScoped = false;
if (name[0] === '@') {
isScoped = true;
name = name.slice(1);
}
// take first part as the name
const parts = name.split('@');
if (parts.length > 1) {
name = parts.shift();
range = parts.join('@');
if (range) {
hasVersion = true;
} else {
range = '*';
}
}
// add back @ scope suffix
if (isScoped) {
name = `@${name}`;
}
return {name, range, hasVersion};
}
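// Hedged usage sketch appended for illustration (not part of the original module):
// the expected name/range splits for a few common pattern shapes. The helper name
// below is hypothetical and exists only to keep the example self-contained.
export function demoNormalizePattern(): void {
  console.log(normalizePattern('react'));             // {name: 'react', range: 'latest', hasVersion: false}
  console.log(normalizePattern('react@^16.0.0'));     // {name: 'react', range: '^16.0.0', hasVersion: true}
  console.log(normalizePattern('@babel/core@7.0.0')); // {name: '@babel/core', range: '7.0.0', hasVersion: true}
  console.log(normalizePattern('left-pad@'));         // {name: 'left-pad', range: '*', hasVersion: false}
}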
|
{
"pile_set_name": "Github"
}
|
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
/**
* These are internal constructor parameters for PublicClass's internal constructor.
* @internal
*/
export interface IPublicClassInternalParameters {}
/**
* This is a public class
* @public
*/
export class PublicClass {
/** @internal */
constructor(parameters: IPublicClassInternalParameters) {}
/**
* This is a beta field
* @beta
*/
public betaField: string = 'hello';
/**
* This is a comment
*/
public undecoratedMember(): void {}
/**
* This is a beta comment
* @beta
*/
public betaMember(): void {}
/**
* This is an alpha comment
* @alpha
*/
public alphaMember(): void {}
/**
* This is an internal member
* @internal
*/
public _internalMember(): void {}
}
|
{
"pile_set_name": "Github"
}
|
<?php
namespace WPOnion\Ajax;
use WPOnion\Bridge\Ajax;
use WPOnion\Icons;
defined( 'ABSPATH' ) || exit;
/**
* Class Icon_Picker
*
* @package WPOnion\Ajax
* @author Varun Sridharan <[email protected]>
*/
class Icon_Picker extends Ajax {
protected $validate_field_path = false;
protected $validate_module = false;
/**
* Returns A List of enabled icons.
*
* @param array $libs
*
* @return mixed
*/
protected function enabled_icons( $libs ) {
$enabled = $this->request( 'enabled', true );
if ( wponion_is_bool_val( $enabled ) ) {
return $libs;
}
$enabled = ( ! is_array( $enabled ) ) ? array( $enabled ) : $enabled;
if ( wponion_is_array( $enabled ) ) {
foreach ( $libs as $name => $_n ) {
if ( ! in_array( $name, $enabled, true ) ) {
unset( $libs[ $name ] );
}
}
}
return $libs;
}
/**
* Returns A List of disabled icons.
*
* @param array $libs
*
* @return array
*/
protected function disabled_icons( $libs ) {
$disabled = $this->request( 'disabled', true );
if ( is_string( $disabled ) ) {
$disabled = array( $disabled );
}
if ( wponion_is_array( $disabled ) && wponion_is_array( $libs ) ) {
foreach ( $libs as $name => $_n ) {
if ( in_array( $name, $disabled, true ) ) {
unset( $libs[ $name ] );
}
}
}
return $libs;
}
/**
* Generates Picker Header.
*
* @param $libs
* @param $selected_lib
*
* @return string
*/
protected function picker_header( $libs, $selected_lib ) {
$html = '<div class="wponion-icon-picker-model-header"> <input type="text" placeholder="' . __( 'Search Icon', 'wponion' ) . '"/>';
if ( wponion_is_array( $libs ) && count( $libs ) > 1 ) {
$select = wpo_field( 'select' )->options( $libs )->only_field( true );
$html .= $select->render( $selected_lib );
}
$html .= '</div>';
return $html;
}
/**
* Generates Group.
*
* @param $title
* @param $icons
*
* @return string
*/
protected function create_group( $title, $icons ) {
/* @var \WPO\Fields\Accordion $acc */
$acc = wpo_field( 'accordion', sanitize_title( $title ) )->open();
$acc->heading( $title );
$acc->content( $icons );
return $acc->render( false, false );
}
/**
* Runs Ajax Request.
*/
public function run() {
wponion_timer( 'icon_render' );
$libs = $this->disabled_icons( $this->enabled_icons( Icons::icon_list() ) );
$libs = ( ! wponion_is_array( $libs ) ) ? array() : $libs;
if ( empty( $libs ) ) {
$this->error( __( 'Icon Library Not found', 'wponion' ) );
}
$group_icons = $this->request( 'group_icons', false );
$default_lib = wponion_is_array( $libs ) ? current( array_keys( $libs ) ) : $libs;
$is_first_load = $this->request( 'first_load', false );
$selected_lib = $this->request( 'wponion-icon-lib', $default_lib );
$selected_lib = ( ! isset( $libs[ $selected_lib ] ) ) ? $default_lib : $selected_lib;
$json = Icons::get( $selected_lib );
if ( ! wponion_is_array( $json->icons() ) || empty( $json->icons() ) ) {
$this->error( __( 'Icon Library Not found', 'wponion' ) );
}
$this->add_assets = $json->assets( false );
$html = '';
if ( 'no' !== $is_first_load ) {
$html .= $this->picker_header( $libs, $selected_lib );
$html .= '<div class="wponion-icon-picker-container-scroll">';
}
$html .= '<div id="' . $selected_lib . '" class="wponion-icon-framework"><div class="wponion-icon-picker-container">';
foreach ( $json->icons() as $key => $data ) {
/**
* Simple Icon Layouts
*
* @example Array Layout 1 : array(
* array( 'css' => 'ico icon-gear', 'title' => 'Title' )
* array( 'css' => 'ico icon-gear2', 'title' => 'Title2' )
* )
*
* @example Array Layout 2 : array( 'icon icon-gear', 'icon icon-gear-2' )
*
* @example Array Layout 3 : array(
* 'ico icon-gear' => array( 'title' => 'Title', 'terms' => 'Search1 Search2' )
* 'ico icon-gear-2' => array( 'title' => 'Title2', 'terms' => 'Search1 Search2' )
* )
*
* @example Array Layout 4 : array(
* 'ico icon-gear' => 'Icon Title',
* 'ico icon-gear-2' => 'Icon Title2',
* )
*
* Group Icon Layouts
*
* @example Group Array Layout 1 : array(
* "Group1"=>array('icon icon-gear','icon icon-gear2'),
* );
*
* Group Icons Layout 2 : all the above options except group layout 1
*/
$html .= $this->loop_icons( $key, $data, $group_icons );
}
$html .= '</div></div>';
$html .= ( 'no' !== $is_first_load ) ? '</div>' : '';
$this->json_success( array(
'html' => $html,
'timer' => wponion_timer( 'icon_render', true ),
) );
}
/**
* Loops Each Icon Framework and regenerates HTML.
*
* @param string|int $key
* @param string|array $data
* @param bool $is_group
*
* @return string
*/
protected function loop_icons( $key, $data, $is_group = false ) {
$key_num = ( is_numeric( $key ) );
$key_string = ( is_string( $key ) );
// Simple Array Layout 1
if ( $key_num && ( wponion_is_array( $data ) && isset( $data['css'] ) ) ) {
return $this->single_icon_html( $data );
}
// Simple Array Layout 2
if ( $key_num && ( ( wponion_is_array( $data ) && isset( $data['css'] ) ) || is_string( $data ) ) ) {
return $this->single_icon_html( $data );
}
// Simple Array Layout 3
if ( $key_string && wponion_is_array( $data ) && ( isset( $data['title'] ) || isset( $data['terms'] ) ) ) {
$data['css'] = $key;
return $this->single_icon_html( $data );
}
// Simple Array Layout 4
if ( $key_string && is_string( $data ) ) {
return $this->single_icon_html( array(
'css' => $key,
'title' => $data,
) );
}
// Group Array Layout 1
if ( $key_string && wponion_is_array( $data ) && isset( $data[0] ) ) {
$r = '';
foreach ( $data as $icon ) {
$r .= $this->single_icon_html( $icon );
}
return ( true === $is_group ) ? $this->create_group( $key, $r ) : $r;
}
// Group Array Layout 2
if ( $key_string && wponion_is_array( $data ) && ! isset( $data[0] ) ) {
$r = '';
foreach ( $data as $_key => $icon ) {
$r .= $this->loop_icons( $_key, $icon, $is_group );
}
return ( true === $is_group ) ? $this->create_group( $key, $r ) : $r;
}
return '';
}
/**
* Generates Single Icon's HTML.
*
* @param $icon
*
* @return string
*/
protected function single_icon_html( $icon ) {
$icon = wponion_parse_args_forced_values( 'css', $icon, Icons::icon_defaults() );
$title = ( empty( $icon['title'] ) ) ? $icon['css'] : $icon['title'];
$search = ( is_string( $icon['terms'] ) ) ? explode( ',', $icon['terms'] ) : $icon['terms'];
		$search     = ( is_array( $search ) ) ? implode( ' ', $search ) : $search;
$icon_html = wponion_icon( $icon['css'] );
return <<<HTML
<div class="wponion-icon-preview-wrap">
<span data-icon="{$icon['css']}" title="$title" class="wponion-icon-preview">$icon_html</span>
<span class="hidden wpo-icon-terms" style="display: none !important; visibility: hidden !important;">{$icon['css']} $search</span>
</div>
HTML;
}
}
|
{
"pile_set_name": "Github"
}
|
# Copyright 2015-2020 the openage authors. See copying.md for legal info.
# Find Python
# ~~~~~~~~~~~
#
# Find the Python interpreter, and related info.
#
# This is a wrapper around FindPython3.cmake,
# which sets many more variables:
# https://cmake.org/cmake/help/v3.12/module/FindPython3.html
#
# This file defines the following variables:
#
# PYTHON_FOUND - True when python was found.
# PYTHON - The full path to the Python interpreter.
# PYTHON_INCLUDE_DIRS - Include path for Python extensions.
# PYTHON_LIBRARIES - Library and Linker options for Python extensions.
#
# Also defines py_exec and py_get_config_var.
###############################################################
# You can manually pass the directory of an interpreter
# by defining PYTHON_DIR or passing -DPYTHON_DIR="<DIRECTORY>"
# to CMake. It's used as a hint for where to look.
if(PYTHON_DIR)
set(Python3_ROOT_DIR "${PYTHON_DIR}")
endif()
###############################################################
# Never use the Windows Registry to find python
set(Python3_FIND_REGISTRY "NEVER")
# when there are multiple pythons, preferably use the version of
# the default `python3` executable.
execute_process(
COMMAND "python3" -c "import platform; print(platform.python_version())"
OUTPUT_VARIABLE PYVER_OUTPUT
RESULT_VARIABLE PYVER_RETVAL
)
if(PYVER_RETVAL EQUAL 0)
string(REGEX MATCH "^[0-9]+\\.[0-9]+" PYTHON_MIN_VERSION "${PYVER_OUTPUT}")
set(need_exact_version "EXACT")
endif()
# use cmake's FindPython3 to locate library and interpreter
find_package(Python3 ${PYTHON_MIN_VERSION} ${need_exact_version} COMPONENTS Interpreter Development NumPy)
# python version string to cpython api test in modules/FindPython_test.cpp
set(PYTHON_MIN_VERSION_HEX "0x0${Python3_VERSION_MAJOR}0${Python3_VERSION_MINOR}0000")
# there's a static_assert that tests the Python version.
# that way, we verify the interpreter and the library version.
# (the interpreter provided us the library location)
try_compile(PYTHON_TEST_RESULT
"${CMAKE_BINARY_DIR}"
SOURCES "${CMAKE_CURRENT_LIST_DIR}/FindPython_test.cpp"
LINK_LIBRARIES Python3::Python
CXX_STANDARD 17
COMPILE_DEFINITIONS "-DTARGET_VERSION=${PYTHON_MIN_VERSION_HEX}"
OUTPUT_VARIABLE PYTHON_TEST_OUTPUT
)
if(NOT PYTHON_TEST_RESULT)
message(WARNING "!! No suitable Python interpreter was found !!\n")
message(WARNING "We need a Python interpreter >= ${PYTHON_MIN_VERSION} that is shipped with libpython and header files.\n")
message(WARNING "Specify the directory to your own with -DPYTHON_DIR=/dir/of/executable\n\n\n")
elseif(PYTHON_TEST_RESULT)
# Interfacing
# Python.cmake vars <= Python3.cmake vars
set(PYTHON ${Python3_EXECUTABLE} CACHE FILEPATH "Location of the Python interpreter" FORCE)
set(PYTHON_FOUND ${Python3_Interpreter_FOUND})
set(PYTHON_LIBRARIES ${Python3_LIBRARIES} CACHE STRING "Linker invocation for the Python library" FORCE)
set(PYTHON_INCLUDE_DIRS ${Python3_INCLUDE_DIRS} CACHE PATH "Location of the Python include dir" FORCE)
set(PYTHON_VERSION_STRING ${Python3_VERSION})
# Numpy.cmake vars <= Python3.cmake vars
set(NUMPY_FOUND ${Python3_NumPy_FOUND})
set(NUMPY_VERSION ${Python3_NumPy_VERSION})
set(NUMPY_INCLUDE_DIR ${Python3_NumPy_INCLUDE_DIRS} CACHE STRING "Linker invocation for the NumPy library" FORCE)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Python REQUIRED_VARS PYTHON PYTHON_INCLUDE_DIRS PYTHON_LIBRARIES)
endif()
unset(PYTHON_TEST_RESULT)
unset(PYTHON_TEST_OUTPUT)
# helper functions
function(py_exec STATEMENTS RESULTVAR)
# executes some python statement(s), and returns the result in RESULTVAR.
# aborts with a fatal error on error.
# no single quotes are allowed in STATEMENTS.
execute_process(
COMMAND "${PYTHON}" -c "${STATEMENTS}"
OUTPUT_VARIABLE PY_OUTPUT
RESULT_VARIABLE PY_RETVAL
)
if(NOT PY_RETVAL EQUAL 0)
message(FATAL_ERROR "failed:\n${PYTHON} -c '${STATEMENTS}'\n${PY_OUTPUT}")
endif()
string(STRIP "${PY_OUTPUT}" PY_OUTPUT_STRIPPED)
set("${RESULTVAR}" "${PY_OUTPUT_STRIPPED}" PARENT_SCOPE)
endfunction()
function(py_get_config_var VAR RESULTVAR)
# uses py_exec to determine a config var as in distutils.sysconfig.get_config_var().
py_exec(
"from distutils.sysconfig import get_config_var; print(get_config_var('${VAR}'))"
RESULT
)
set("${RESULTVAR}" "${RESULT}" PARENT_SCOPE)
endfunction()
|
{
"pile_set_name": "Github"
}
|
#import "VVQCComposition.h"
#import <VVBasics/VVBasics.h>
#import <Quartz/Quartz.h>
@implementation VVQCComposition
+ (BOOL) pathLooksLikeALegitComposition:(NSString *)p {
BOOL returnMe = NO;
if (p==nil)
return returnMe;
NSString *extension = [p pathExtension];
if (extension == nil)
return returnMe;
if ([[[p lastPathComponent] firstChar] isEqualToString:@"."])
return returnMe;
if ([extension length] != 3)
return returnMe;
if ([extension compare:@"qtz" options:NSCaseInsensitiveSearch range:NSMakeRange(0,3)] != NSOrderedSame)
return returnMe;
returnMe = YES;
return returnMe;
}
+ (id) compositionWithFile:(NSString *)p {
VVQCComposition *returnMe = nil;
returnMe = [[VVQCComposition alloc] initWithFile:p];
if (returnMe == nil) {
return nil;
}
return [returnMe autorelease];
}
- (id) initWithFile:(NSString *)p {
if (self = [super init]) {
// initialize everything to nil, so if i get released i won't have any issues
compositionPath = nil;
publishedInputsDict = nil;
inputKeys = nil;
publishedOutputsDict = nil;
category = nil;
description = nil;
protocols = nil;
// if the passed path is nil, if it doesn't have an extension, if it starts with a period, or doesn't end in "qtz"- bail
if (![VVQCComposition pathLooksLikeALegitComposition:p])
goto BAIL;
// now that i know i'm a qtz and i know i'm not invisible, proceed with loading!
NSDictionary *fileDict = nil;
NSDictionary *rootPatchDict = nil;
NSDictionary *stateDict = nil;
NSArray *portsArray = nil;
NSEnumerator *it = nil;
NSDictionary *portDict = nil;
NSString *tmpString = nil;
NSDictionary *inputSplitterDict = nil;
NSMutableDictionary *newPublishedPortDict = nil;
compositionPath = [p retain];
publishedInputsDict = [[NSMutableDictionary dictionaryWithCapacity:0] retain];
inputKeys = [[NSMutableArray arrayWithCapacity:0] retain];
publishedOutputsDict = [[NSMutableDictionary dictionaryWithCapacity:0] retain];
category = nil;
description = nil;
hasLiveInput = NO;
// make a dict from the path
fileDict = [NSDictionary dictionaryWithContentsOfFile:compositionPath];
if (fileDict == nil)
goto BAIL;
[fileDict retain];
// get the category and description strings
category = [fileDict objectForKey:QCCompositionAttributeCategoryKey];
if (category != nil) {
//NSLog(@"\t\tcategory is %@",category);
[category retain];
}
description = [fileDict objectForKey:QCCompositionAttributeDescriptionKey];
if (description != nil) {
//NSLog(@"\t\tdescription is %@",description);
[description retain];
}
// get the root patch dict
rootPatchDict = [fileDict objectForKey:@"rootPatch"];
if (rootPatchDict == nil) {
VVRELEASE(fileDict);
goto BAIL;
}
[rootPatchDict retain];
// get the state dict
stateDict = [rootPatchDict objectForKey:@"state"];
if (stateDict == nil) {
VVRELEASE(fileDict);
VVRELEASE(rootPatchDict);
goto BAIL;
}
[stateDict retain];
protocols = [fileDict objectForKey:@"protocols"];
if (protocols != nil) {
[protocols retain];
}
// get the array of dicts which represent published inputs
portsArray = [stateDict objectForKey:@"publishedInputPorts"];
if (portsArray != nil) {
// run through all the dicts in the array; each dict describes a published input port's node
it = [portsArray objectEnumerator];
while (portDict = [it nextObject]) {
// determine the published name of the port
tmpString = [portDict objectForKey:@"key"];
if (tmpString != nil) {
// store the published name in the array of input keys (this preserves the order from the QC comp)
[inputKeys addObject:tmpString];
// make a new dict for my use, add it to me
newPublishedPortDict = [NSMutableDictionary dictionaryWithCapacity:0];
[publishedInputsDict setObject:newPublishedPortDict forKey:tmpString];
// find the node dict which corresponds to the input splitter
inputSplitterDict = [self
findSplitterForPublishedInputNamed:tmpString
inStateDict:stateDict];
// if there's a node dict...
if (inputSplitterDict != nil) {
// add the contents of the node dict's 'state' dict to it
[newPublishedPortDict addEntriesFromDictionary:[inputSplitterDict objectForKey:@"state"]];
// clear out some of the useless values
[self cleanUpStateDict:newPublishedPortDict];
}
// ...if there's no node dict, user published a non-splitter input! ignore that shit!
/*
else {
// set up the dict so it looks like a standard number input
[newPublishedPortDict setObject:VVSTRING(@"QCNumberPort") forKey:@"portClass"];
[newPublishedPortDict setObject:[NSNumber numberWithFloat:1.0] forKey:@"defaultVal"];
}
*/
}
}
}
// get the array of dicts which represent published outputs
portsArray = [stateDict objectForKey:@"publishedOutputPorts"];
if (portsArray != nil) {
// run through all the dicts in the array; each dict describes a published output port's node
it = [portsArray objectEnumerator];
while (portDict = [it nextObject]) {
// determine the published name of the port
tmpString = [portDict objectForKey:@"key"];
if (tmpString != nil) {
// make a new dict for my use, add it to me
newPublishedPortDict = [NSMutableDictionary dictionaryWithCapacity:0];
[publishedOutputsDict setObject:newPublishedPortDict forKey:tmpString];
// find the node dict which corresponds to the input splitter
inputSplitterDict = [self
findSplitterForPublishedOutputNamed:tmpString
inStateDict:stateDict];
// if there's a node dict...
if (inputSplitterDict != nil) {
// add the contents of the node dict's 'state' dict to it
[newPublishedPortDict addEntriesFromDictionary:[inputSplitterDict objectForKey:@"state"]];
// clear out some of the useless values
[self cleanUpStateDict:newPublishedPortDict];
}
// ...if there's no node dict, user published a non-splitter output! ignore that shit!
}
}
}
// run through the file dict's "inputParameters" (default vals)
NSDictionary *inputParametersDict = [fileDict objectForKey:@"inputParameters"];
NSEnumerator *keyIt;
NSString *keyPtr;
keyIt = [[inputParametersDict allKeys] objectEnumerator];
while (keyPtr = [keyIt nextObject]) {
// find my copy of the dict which represents the input port
newPublishedPortDict = [publishedInputsDict objectForKey:keyPtr];
if (newPublishedPortDict != nil) {
[newPublishedPortDict
setObject:[inputParametersDict objectForKey:keyPtr]
forKey:@"defaultVal"];
}
}
// while i've got the dict around, check to see if there's a live input in it...
hasLiveInput = [self findVideoInputInStateDict:stateDict];
// start releasing dicts i created earlier, and explicitly retained to prevent problems
VVRELEASE(fileDict);
VVRELEASE(rootPatchDict);
VVRELEASE(stateDict);
return self;
}
BAIL:
if (self != nil)
[self release];
return nil;
}
- (void) dealloc {
//NSLog(@"VVQCComposition:dealloc:");
VVRELEASE(compositionPath);
VVRELEASE(publishedInputsDict);
VVRELEASE(inputKeys);
VVRELEASE(publishedOutputsDict);
VVRELEASE(category);
VVRELEASE(description);
VVRELEASE(protocols);
[super dealloc];
}
- (NSDictionary *) findSplitterForPublishedInputNamed:(NSString *)n inStateDict:(NSDictionary *)d {
//NSLog(@"VVQCComposition:findSplitterForPublishedInputNamed:inStateDict: ... %@",n);
if (d == nil) {
return nil;
}
NSDictionary *nodeDict = nil;
NSEnumerator *it = nil;
id anObj = nil;
NSDictionary *inputPortDict = nil;
NSString *nodeName = nil;
NSString *nodePortName = nil;
anObj = [d objectForKey:@"publishedInputPorts"];
if (anObj == nil)
return nil;
// run through all the dicts in the input ports array
it = [anObj objectEnumerator];
while (inputPortDict = [it nextObject]) {
if ([[inputPortDict objectForKey:@"key"] isEqualToString:n])
break;
}
// if i couldn't find the dict in the input port array for the published input, return nil
if (inputPortDict == nil)
return nil;
// get the name of the node corresponding to this input object, and the port name of the node
nodeName = [inputPortDict objectForKey:@"node"];
nodePortName = [inputPortDict objectForKey:@"port"];
// bail if i can't find the node name or the node port name
if ((nodeName == nil) || (nodePortName == nil))
return nil;
// find the node with the corresponding node name
anObj = [d objectForKey:@"nodes"];
if (anObj == nil)
return nil;
it = [anObj objectEnumerator];
while (anObj = [it nextObject]) {
if ([[anObj objectForKey:@"key"] isEqualToString:nodeName]) {
nodeDict = anObj;
break;
}
}
// if the node's class is 'QCSplitter', i can just return it
if ([[nodeDict objectForKey:@"class"] isEqualToString:@"QCSplitter"])
return nodeDict;
// if the node isn't a 'QCSplitter'....
else {
// all nodes have a state dict; if this is a group, the state dict will have a "nodes" array
anObj = [nodeDict objectForKey:@"state"];
// if the state dict is nil, bail
if (anObj == nil) {
return nil;
}
// if the state dict has an object at its 'nodes' key, it's a group- parse down
if ([anObj objectForKey:@"nodes"] != nil) {
anObj = [self
findSplitterForPublishedInputNamed:nodePortName
inStateDict:anObj];
return anObj;
}
// if it doesn't, it's an actual object- return nil
else {
return nil;
}
/*
// if the node has a state dict, it's a group patch of some sort- repeat this process
anObj = [nodeDict objectForKey:@"state"];
if (anObj != nil) {
anObj = [self
findSplitterForPublishedInputNamed:nodePortName
inStateDict:anObj];
return anObj;
}
// else if there's no state dict, return nil!
else {
return nil;
}
*/
}
return nil;
}
- (NSDictionary *) findSplitterForPublishedOutputNamed:(NSString *)n inStateDict:(NSDictionary *)d {
if (d == nil) {
return nil;
}
NSDictionary *nodeDict = nil;
NSEnumerator *it = nil;
id anObj = nil;
NSDictionary *inputPortDict = nil;
NSString *nodeName = nil;
NSString *nodePortName = nil;
anObj = [d objectForKey:@"publishedOutputPorts"];
if (anObj == nil)
return nil;
// run through all the dicts in the output ports array
it = [anObj objectEnumerator];
while (inputPortDict = [it nextObject]) {
if ([[inputPortDict objectForKey:@"key"] isEqualToString:n])
break;
}
// if i couldn't find the dict in the output port array for the published output, return nil
if (inputPortDict == nil)
return nil;
// get the name of the node corresponding to this output object, and the port name of the node
nodeName = [inputPortDict objectForKey:@"node"];
nodePortName = [inputPortDict objectForKey:@"port"];
// bail if i can't find the node name or the node port name
if ((nodeName == nil) || (nodePortName == nil))
return nil;
// find the node with the corresponding node name
anObj = [d objectForKey:@"nodes"];
if (anObj == nil)
return nil;
it = [anObj objectEnumerator];
while (anObj = [it nextObject]) {
if ([[anObj objectForKey:@"key"] isEqualToString:nodeName]) {
nodeDict = anObj;
break;
}
}
// if the node's class is 'QCSplitter', i can just return it
if ([[nodeDict objectForKey:@"class"] isEqualToString:@"QCSplitter"])
return nodeDict;
// if the node isn't a 'QCSplitter'....
else {
// if the node has a state dict, it's a group patch of some sort- repeat this process
anObj = [nodeDict objectForKey:@"state"];
if (anObj != nil) {
anObj = [self
findSplitterForPublishedOutputNamed:nodePortName
inStateDict:anObj];
return anObj;
}
// else if there's no state dict, return nil!
else {
return nil;
}
}
return nil;
}
- (NSMutableArray *) arrayOfItemDictsOfClass:(NSString *)className {
//NSLog(@"%s ... %@",__func__,className);
if (className == nil)
return nil;
NSDictionary *fileDict = [NSDictionary dictionaryWithContentsOfFile:compositionPath];
if (fileDict==nil) {
NSLog(@"\t\terr: bailing, fileDict nil %s",__func__);
return nil;
}
NSDictionary *rootPatchDict = [fileDict objectForKey:@"rootPatch"];
if (rootPatchDict==nil) {
NSLog(@"\t\terr: bailing, rootPatchDict nil %s",__func__);
return nil;
}
NSDictionary *stateDict = [rootPatchDict objectForKey:@"state"];
if (stateDict==nil) {
NSLog(@"\t\terr: bailing, stateDict nil %s",__func__);
return nil;
}
NSMutableArray *returnMe = MUTARRAY;
[self _addItemDictsOfClass:className inStateDict:stateDict toArray:returnMe];
return returnMe;
}
- (void) _addItemDictsOfClass:(NSString *)c inStateDict:(NSDictionary *)d toArray:(NSMutableArray *)a {
if (c==nil || a==nil)
return;
// run through all the nodes in the state dict
NSArray *nodesArray = [d objectForKey:@"nodes"];
if (nodesArray!=nil && [nodesArray count]>0) {
for (NSDictionary *nodeDict in nodesArray) {
// check to see if this node's what i'm looking for- if it is, add a copy of the node dict (get rid of its sub-nodes!) to the array
NSString *nodeClass = [nodeDict objectForKey:@"class"];
if (nodeClass!=nil && [nodeClass isEqualToString:c]) {
NSMutableDictionary *mutCopy = [nodeDict mutableCopy];
[mutCopy removeObjectForKey:@"nodes"];
[a addObject:mutCopy];
[mutCopy release];
}
// call this method recursively on the 'state' dict of this node
NSDictionary *nodeStateDict = [nodeDict objectForKey:@"state"];
if (nodeStateDict != nil)
[self _addItemDictsOfClass:c inStateDict:nodeStateDict toArray:a];
}
}
}
- (BOOL) findVideoInputInStateDict:(NSDictionary *)d {
//NSLog(@"VVQCComposition:findVideoInputInStateDict:");
if ((d == nil) || ([d count] < 1)) {
return NO;
}
/*
state dicts have an array at "nodes".
each node is a dict- if the dict has a "state" dict, the state dict has an array at "nodes".
recursively call this method, return a YES if a video input was found.
*/
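// illustrative sketch of that nesting (keys as used below; the values are hypothetical):
// state = { nodes = ( { class="QCVideoInput"; key="n1"; }, { class="QCPatch"; state={ nodes=( ... ); }; } ); }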
BOOL foundAnInput = NO;
NSArray *nodesArray;
NSEnumerator *nodeIt;
NSDictionary *nodeDict;
NSString *nodeClass;
//NSString *nodeKey;
//NSDictionary *nodeConnectionsDict;
//NSEnumerator *connectionIt;
//NSDictionary *connectionDict;
NSDictionary *nodeStateDict;
//NSString *nodeConnSourceNode;
//NSString *nodeConnSourcePort;
nodesArray = [d objectForKey:@"nodes"];
if ((nodesArray != nil) && ([nodesArray count] > 0)) {
nodeIt = [nodesArray objectEnumerator];
while ((nodeDict = [nodeIt nextObject]) && (!foundAnInput)) {
// if the node's class is 'QCVideoInput', i've found an input
nodeClass = [nodeDict objectForKey:@"class"];
if ((nodeClass != nil) && ([nodeClass isEqualToString:@"QCVideoInput"])) {
return YES;
/*
// check to see if the input's connected to anything: find the node's key...
nodeKey = [nodeDict objectForKey:@"key"];
if (nodeKey != nil) {
// run through the items in the state dict's "connections" dictionary...
nodeConnectionsDict = [d objectForKey:@"connections"];
if ((nodeConnectionsDict != nil) && ([nodeConnectionsDict count] > 0)) {
connectionIt = [nodeConnectionsDict objectEnumerator];
while (connectionDict = [connectionIt nextObject]) {
nodeConnSourceNode = [connectionDict objectForKey:@"sourceNode"];
nodeConnSourcePort = [connectionDict objectForKey:@"sourcePort"];
// if the connection dictionary's "sourceNode" matches the key of the input node....
if ((nodeConnSourceNode != nil) && ([nodeConnSourceNode isEqualToString:nodeKey])) {
// if the connection dictionary's "sourcePort" is "outputImage"- the input's in use, return YES
if ((nodeConnSourcePort != nil) && ([nodeConnSourcePort isEqualToString:@"outputImage"])) {
return YES;
}
}
}
}
}
*/
}
// if the node has a state dict, call this method recursively, searching for the video input
nodeStateDict = [nodeDict objectForKey:@"state"];
if ((nodeStateDict != nil) && ([nodeStateDict count] > 0)) {
foundAnInput = [self findVideoInputInStateDict:nodeStateDict];
}
}
}
return foundAnInput;
}
- (void) cleanUpStateDict:(NSMutableDictionary *)d {
if (d == nil) {
return;
}
//[d removeObjectForKey:@"userInfo"]; // the 'userInfo' NSData object encodes an NSDictionary which has the "name" of the splitter and its x/y location within the QC composition
[d removeObjectForKey:@"version"];
[d removeObjectForKey:@"nodes"];
[d removeObjectForKey:@"publishedInputPorts"];
[d removeObjectForKey:@"publishedOutputPorts"];
[d removeObjectForKey:@"ivarInputPortStates"];
[d removeObjectForKey:@"systemInputPortStates"];
[d removeObjectForKey:@"connections"];
/* there's a 'userInfo' object which is an NSData blob- the NSData is an unkeyed archive for an
NSDictionary- i want to decode this dict and move some of its contents to the state dict */
NSData *blob = [d objectForKey:@"userInfo"];
if (blob != nil) {
NSDictionary *decoded = [NSUnarchiver unarchiveObjectWithData:blob];
if (decoded != nil) {
[d addEntriesFromDictionary:decoded];
}
[d removeObjectForKey:@"userInfo"];
[d removeObjectForKey:@"position"];
}
}
/*
- (id) valueForInputKey:(NSString *)k {
return nil;
}
*/
- (NSDictionary *) publishedInputsDict {
return [[publishedInputsDict retain] autorelease];
}
- (NSDictionary *) publishedOutputsDict {
return [[publishedOutputsDict retain] autorelease];
}
- (NSString *) compositionName {
if (compositionPath == nil)
return nil;
NSString *lastPathComponent = [compositionPath lastPathComponent];
if (lastPathComponent == nil)
return nil;
return [lastPathComponent stringByDeletingPathExtension];
}
- (NSString *) compositionPath {
return [[compositionPath retain] autorelease];
}
- (NSArray *) inputKeys {
return [[inputKeys retain] autorelease];
/*
if (publishedInputsDict == nil)
return nil;
return [publishedInputsDict allKeys];
*/
}
- (NSString *) category {
return category;
}
- (NSString *) description {
return description;
}
- (NSArray *) protocols {
return protocols;
}
- (BOOL) hasLiveInput {
return hasLiveInput;
}
- (BOOL) hasFXInput {
NSArray *inputStrings = [publishedInputsDict allKeys];
if ((inputStrings!=nil) && ([inputStrings count]>0)) {
for (NSString *tmpString in inputStrings) {
if ([tmpString denotesFXInput]) {
NSDictionary *portDict = [publishedInputsDict objectForKey:tmpString];
if (portDict != nil) {
NSString *portClass = [portDict objectForKey:@"portClass"];
if ((portClass!=nil)&&([portClass isEqualToString:@"QCGLImagePort"]))
return YES;
}
}
}
}
return NO;
}
- (BOOL) isCompositionMode {
//NSLog(@"%s ... %@",__func__,compositionPath);
NSArray *inputStrings = [publishedInputsDict allKeys];
if ((inputStrings!=nil) && ([inputStrings count]>0)) {
BOOL hasTopInput = NO;
BOOL hasBottomInput = NO;
// Setting this to yes for the time being because system QC comps use the time of the patch
BOOL hasOpacity = NO;
for (NSString *tmpString in inputStrings) {
if ([tmpString denotesCompositionTopImage]) {
//NSLog(@"\t\ttop image named %@ found",tmpString);
hasTopInput = YES;
}
else if ([tmpString denotesCompositionBottomImage]) {
//NSLog(@"\t\tbottom image named %@ found",tmpString);
hasBottomInput = YES;
}
else if ([tmpString denotesCompositionOpacity]) {
//NSLog(@"\t\topacity named %@ found",tmpString);
hasOpacity = YES;
}
}
if (hasTopInput && hasBottomInput && hasOpacity) {
//NSLog(@"\t\t***************** COMP MODE!");
return YES;
}
}
return NO;
}
- (BOOL) isTransitionMode {
if (protocols==nil)
return NO;
for (NSString *protocol in protocols) {
if ([protocol isEqualToString:QCCompositionProtocolGraphicTransition])
return YES;
//else if ([protocol isEqualToString:QCCompositionProtocolImageTransition])
// return YES;
}
return NO;
}
- (BOOL) isMusicVisualizer {
if (protocols==nil)
return NO;
for (NSString *protocol in protocols) {
if ([protocol isEqualToString:QCCompositionProtocolMusicVisualizer])
return YES;
}
return NO;
}
- (BOOL) isTXTSrc {
NSArray *inputStrings = [publishedInputsDict allKeys];
if ((inputStrings!=nil) && ([inputStrings count]>0)) {
BOOL hasFileInput = NO;
for (NSString *tmpString in inputStrings) {
if ([tmpString denotesTXTFileInput]) {
hasFileInput = YES;
break;
}
}
if (hasFileInput) {
return YES;
}
}
return NO;
}
- (BOOL) isIMGSrc {
NSArray *inputStrings = [publishedInputsDict allKeys];
if ((inputStrings!=nil) && ([inputStrings count]>0)) {
BOOL hasFileInput = NO;
for (NSString *tmpString in inputStrings) {
if ([tmpString denotesIMGFileInput]) {
hasFileInput = YES;
break;
}
}
if (hasFileInput) {
return YES;
}
}
return NO;
}
@end
|
{
"pile_set_name": "Github"
}
|
{
"name": "Default Web Site",
"state": "Started",
"physicalPath": "%SystemDrive%\\inetpub\\wwwroot",
"bindings": {
"Attributes": [
],
"ChildElements": [
],
"ElementTagName": "bindings",
"Methods": null,
"Schema": {
"AllowUnrecognizedAttributes": false,
"AttributeSchemas": "",
"ChildElementSchemas": null,
"CollectionSchema": "Microsoft.IIs.PowerShell.Framework.ConfigurationCollectionSchema",
"IsCollectionDefault": false,
"Name": "bindings"
},
"Collection": [
{
"value": "Microsoft.IIs.PowerShell.Framework.ConfigurationElement",
"protocol": "http",
"bindingInformation": "*:80:",
"sslFlags": 0,
"isDsMapperEnabled": false,
"certificateHash": "",
"certificateStoreName": ""
},
{
"value": "Microsoft.IIs.PowerShell.Framework.ConfigurationElement",
"protocol": "net.tcp",
"bindingInformation": "808:*",
"sslFlags": 0,
"isDsMapperEnabled": null,
"certificateHash": null,
"certificateStoreName": null
},
{
"value": "Microsoft.IIs.PowerShell.Framework.ConfigurationElement",
"protocol": "net.pipe",
"bindingInformation": "*",
"sslFlags": 0,
"isDsMapperEnabled": null,
"certificateHash": null,
"certificateStoreName": null
},
{
"value": "Microsoft.IIs.PowerShell.Framework.ConfigurationElement",
"protocol": "net.msmq",
"bindingInformation": "localhost",
"sslFlags": 0,
"isDsMapperEnabled": null,
"certificateHash": null,
"certificateStoreName": null
},
{
"value": "Microsoft.IIs.PowerShell.Framework.ConfigurationElement",
"protocol": "msmq.formatname",
"bindingInformation": "localhost",
"sslFlags": 0,
"isDsMapperEnabled": null,
"certificateHash": null,
"certificateStoreName": null
},
{
"value": "Microsoft.IIs.PowerShell.Framework.ConfigurationElement",
"protocol": "https",
"bindingInformation": "*:443:",
"sslFlags": 0,
"isDsMapperEnabled": false,
"certificateHash": "E024B9723C6EBCF17E933466F2B34D008B9334FB",
"certificateStoreName": "My"
}
]
},
"applicationPool": "DefaultAppPool"
}
|
{
"pile_set_name": "Github"
}
|
package log
import (
"fmt"
"os"
"regexp"
"strconv"
"strings"
"sync"
"time"
ll "github.com/evilsocket/islazy/log"
"github.com/evilsocket/islazy/tui"
"github.com/muraenateam/muraena/core"
)
type logger struct {
Writer *os.File
Level ll.Verbosity
FormatConfig FormatConfig
NoEffects bool
}
var (
lock = &sync.Mutex{}
loggers = map[string]logger{}
reEffects = []*regexp.Regexp{
regexp.MustCompile("\\\\033\\[\\d+m"),
regexp.MustCompile("\\\\e\\[\\d+m"),
regexp.MustCompile("\x1b\\[\\d+m"),
}
)
func Init(opt core.Options, isLogToFile bool, logFilePath string) {
noEffects := false
if !tui.Effects() {
noEffects = true
if *opt.NoColors {
fmt.Printf("\n\nWARNING: Terminal colors have been disabled, view will be very limited.\n\n")
} else {
fmt.Printf("\n\nWARNING: This terminal does not support colors, view will be very limited.\n\n")
}
}
logLevel := ll.INFO
if *opt.Debug {
logLevel = ll.DEBUG
}
config := FormatConfigBasic
config.Format = "{datetime} {level:color}{level:name}{reset} {message}"
// Console Log
err := AddOutput("", logLevel, config, noEffects)
if err != nil {
panic(err)
}
// File Log
if isLogToFile && logFilePath != "" {
err = AddOutput(logFilePath, logLevel, config, true)
if err != nil {
panic(err)
}
}
}
func AddOutput(path string, level ll.Verbosity, config FormatConfig, noEffects bool) (err error) {
var writer *os.File
if path == "" {
writer = os.Stdout
} else {
writer, err = os.OpenFile(path, os.O_APPEND|os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
return
}
}
lock.Lock()
loggers[path] = logger{writer, level, config, noEffects}
lock.Unlock()
return
}
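// Usage sketch (illustrative; the file path below is hypothetical): after Init has
// registered the console sink, extra sinks can be added directly, e.g. a
// debug-level log file with color effects stripped:
//
//	_ = AddOutput("/tmp/muraena-debug.log", ll.DEBUG, FormatConfigBasic, true)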
/*
func CloseOutputs() {
for p, l := range loggers {
if p != "" {
l.Writer.Close()
}
}
}
func RemoveOutput(path string) error {
lock.Lock()
l, b := loggers[path]
lock.Unlock()
if b {
l.Writer.Close()
delete(loggers, path)
} else {
return errors.New("no output with this path")
}
return nil
}
*/
func (l *logger) emit(s string) {
// remove all effects if found
if l.NoEffects {
for _, re := range reEffects {
s = re.ReplaceAllString(s, "")
}
}
s = strings.Replace(s, "%", "%%", -1)
if _, err := fmt.Fprintf(l.Writer, s+"\n"); err != nil {
fmt.Printf("Emit error: %+v", err)
}
}
func do(v ll.Verbosity, format string, args ...interface{}) {
lock.Lock()
defer lock.Unlock()
if len(loggers) <= 0 {
panic("No Output added to log")
}
for _, l := range loggers {
if l.Level > v {
continue
}
currMessage := format
if args != nil {
currMessage = fmt.Sprintf(format, args...)
}
tokens := map[string]func() string{
"{date}": func() string {
return time.Now().Format(l.FormatConfig.DateFormat)
},
"{time}": func() string {
return time.Now().Format(l.FormatConfig.TimeFormat)
},
"{datetime}": func() string {
return time.Now().Format(l.FormatConfig.DateTimeFormat)
},
"{level:value}": func() string {
return strconv.Itoa(int(v))
},
"{level:name}": func() string {
return ll.LevelNames[v]
},
"{level:color}": func() string {
return ll.LevelColors[v]
},
"{message}": func() string {
return currMessage
},
}
logLine := l.FormatConfig.Format
// process token -> callback
for token, cb := range tokens {
logLine = strings.Replace(logLine, token, cb(), -1)
}
// process token -> effect
for token, effect := range Effects {
logLine = strings.Replace(logLine, token, effect, -1)
}
// make sure a user error does not break the log formatting
if tui.HasEffect(logLine) && !strings.HasSuffix(logLine, tui.RESET) {
logLine += tui.RESET
}
l.emit(logLine)
}
}
// Raw emits a message without format to the logs.
func Raw(format string, args ...interface{}) {
lock.Lock()
defer lock.Unlock()
for _, l := range loggers {
currMessage := fmt.Sprintf(format, args...)
l.emit(currMessage)
}
}
// Debug emits a debug message.
func Debug(format string, args ...interface{}) {
do(ll.DEBUG, format, args...)
}
// Info emits an informative message.
func Info(format string, args ...interface{}) {
do(ll.INFO, format, args...)
}
// Important emits an important informative message.
func Important(format string, args ...interface{}) {
do(ll.IMPORTANT, format, args...)
}
// Warning emits a warning message.
func Warning(format string, args ...interface{}) {
do(ll.WARNING, format, args...)
}
// Error emits an error message.
func Error(format string, args ...interface{}) {
do(ll.ERROR, format, args...)
}
// Fatal emits a fatal error message and calls the ll.OnFatal callback.
func Fatal(format string, args ...interface{}) {
do(ll.FATAL, format, args...)
os.Exit(1)
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.worker;
import com.facebook.buck.core.util.immutables.BuckStyleValue;
import java.nio.file.Path;
@BuckStyleValue
interface WorkerProcessCommand {
/**
* Path to file which contains the arguments of the command. This content should be considered as
* an input for the command.
*/
Path getArgsPath();
/**
* Path to file where stdout can be written out. Remote process should output everything into this
* file instead of printing out into its own stdout.
*/
Path getStdOutPath();
/**
* Path to file where stderr can be written out. Remote process should output everything into this
* file instead of printing out into its own stderr.
*/
Path getStdErrPath();
}
|
{
"pile_set_name": "Github"
}
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.recoveryservices.backup.v2017_07_01;
import org.joda.time.DateTime;
import org.joda.time.Period;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* DPM workload-specific job task details.
*/
public class DpmJobTaskDetails {
/**
* The task display name.
*/
@JsonProperty(value = "taskId")
private String taskId;
/**
* The start time.
*/
@JsonProperty(value = "startTime")
private DateTime startTime;
/**
* The end time.
*/
@JsonProperty(value = "endTime")
private DateTime endTime;
/**
* Time elapsed for task.
*/
@JsonProperty(value = "duration")
private Period duration;
/**
* The status.
*/
@JsonProperty(value = "status")
private String status;
/**
* Get the task display name.
*
* @return the taskId value
*/
public String taskId() {
return this.taskId;
}
/**
* Set the task display name.
*
* @param taskId the taskId value to set
* @return the DpmJobTaskDetails object itself.
*/
public DpmJobTaskDetails withTaskId(String taskId) {
this.taskId = taskId;
return this;
}
/**
* Get the start time.
*
* @return the startTime value
*/
public DateTime startTime() {
return this.startTime;
}
/**
* Set the start time.
*
* @param startTime the startTime value to set
* @return the DpmJobTaskDetails object itself.
*/
public DpmJobTaskDetails withStartTime(DateTime startTime) {
this.startTime = startTime;
return this;
}
/**
* Get the end time.
*
* @return the endTime value
*/
public DateTime endTime() {
return this.endTime;
}
/**
* Set the end time.
*
* @param endTime the endTime value to set
* @return the DpmJobTaskDetails object itself.
*/
public DpmJobTaskDetails withEndTime(DateTime endTime) {
this.endTime = endTime;
return this;
}
/**
* Get time elapsed for task.
*
* @return the duration value
*/
public Period duration() {
return this.duration;
}
/**
* Set time elapsed for task.
*
* @param duration the duration value to set
* @return the DpmJobTaskDetails object itself.
*/
public DpmJobTaskDetails withDuration(Period duration) {
this.duration = duration;
return this;
}
/**
* Get the status.
*
* @return the status value
*/
public String status() {
return this.status;
}
/**
* Set the status.
*
* @param status the status value to set
* @return the DpmJobTaskDetails object itself.
*/
public DpmJobTaskDetails withStatus(String status) {
this.status = status;
return this;
}
}
|
{
"pile_set_name": "Github"
}
|
<app-loading-content
[isLoading]="!tradingPairs && !problemGettingPairs"
[noDataText]="problemGettingPairs ? 'exchange.problem-connecting' : 'exchange.offline'"
*ngIf="!activeTradingPair"
></app-loading-content>
<div class="-create" *ngIf="activeTradingPair">
<div [formGroup]="form" class="-form">
<div class="form-field">
<label for="fromAmount">
{{ 'exchange.you-send' | translate }}
<span *ngIf="form.hasError('min')" class="-error">
{{ 'exchange.min-amount' | translate }} {{ form.getError('min') }}
</span>
<span *ngIf="form.hasError('max')" class="-error">
{{ 'exchange.max-amount' | translate }} {{ form.getError('max') }}
</span>
</label>
<div class="-inputs">
<input type="text" formControlName="fromAmount" id="fromAmount">
<div class="-select">
<select formControlName="fromCoin">
<option *ngFor="let pair of tradingPairs" [value]="pair.from">{{ pair.from }}</option>
</select>
</div>
</div>
</div>
<div class="form-field">
<label for="toAmount">{{ 'exchange.you-get' | translate }}</label>
<div class="-inputs -not-allowed">
<input type="text" [value]="toAmount" readonly id="toAmount">
<div class="-input">
<input type="text" [value]="toCoin" readonly>
</div>
</div>
</div>
<div class="form-field">
<label for="toAddress">
<span>{{ 'exchange.to-address' | translate:{coin: toCoin} }}</span>
<span class="-select-address" (click)="selectAddress()">
{{ 'exchange.select' | translate }} <mat-icon>keyboard_arrow_down</mat-icon>
</span>
</label>
<input type="text" formControlName="toAddress" id="toAddress">
</div>
<div class="-buttons">
<mat-checkbox class="-check" (change)="setAgreement($event)" type="checkbox">
{{ 'exchange.agree-1' | translate }}
<a href="https://swaplab.cc/terms" target="_blank" rel="noreferrer nofollow">
{{ 'exchange.agree-2' | translate }}
</a>
{{ 'exchange.agree-3' | translate }}
<a href="https://swaplab.cc/privacy" target="_blank" rel="noreferrer nofollow">
{{ 'exchange.agree-4' | translate }}
</a>
</mat-checkbox>
<app-button class="primary" #exchangeButton (action)="exchange()" [disabled]="!form.valid">
{{ 'exchange.exchange-button' | translate }}
</app-button>
</div>
</div>
<div class="-info" *ngIf="activeTradingPair">
<div class="-item">
<div class="-key">{{ 'exchange.you-send' | translate }}</div>
<div class="-value">{{ sendAmount }} {{ form.get('fromCoin').value }}</div>
</div>
<div class="-item">
<div class="-key">{{ 'exchange.you-get' | translate }}</div>
<div class="-value">≈ {{ toAmount }} {{ toCoin }}</div>
</div>
<div class="-item">
<div class="-key">{{ 'exchange.to-address' | translate:{coin: toCoin} }}</div>
<div class="-value">{{ form.get('toAddress').value || '-' }}</div>
</div>
<div class="-item">
<div class="-key">{{ 'exchange.price' | translate }}</div>
<div class="-value">1 {{ form.get('fromCoin').value }} ≈ {{ activeTradingPair.price.toFixed(6) }} {{ toCoin }}</div>
</div>
<div class="-item">
<div class="-key">{{ 'exchange.time-15' | translate }}</div>
<div class="-value">≈ 15 minutes</div>
</div>
</div>
</div>
|
{
"pile_set_name": "Github"
}
|
import sys
from typing import List, Optional, Tuple
if sys.platform == "win32":
from . import Table
_Validation: Table
ActionText: Table
AdminExecuteSequence: Table
Condition: Table
AdminUISequence: Table
AdvtExecuteSequence: Table
AdvtUISequence: Table
AppId: Table
AppSearch: Table
Property: Table
BBControl: Table
Billboard: Table
Feature: Table
Binary: Table
BindImage: Table
File: Table
CCPSearch: Table
CheckBox: Table
Class: Table
Component: Table
Icon: Table
ProgId: Table
ComboBox: Table
CompLocator: Table
Complus: Table
Directory: Table
Control: Table
Dialog: Table
ControlCondition: Table
ControlEvent: Table
CreateFolder: Table
CustomAction: Table
DrLocator: Table
DuplicateFile: Table
Environment: Table
Error: Table
EventMapping: Table
Extension: Table
MIME: Table
FeatureComponents: Table
FileSFPCatalog: Table
SFPCatalog: Table
Font: Table
IniFile: Table
IniLocator: Table
InstallExecuteSequence: Table
InstallUISequence: Table
IsolatedComponent: Table
LaunchCondition: Table
ListBox: Table
ListView: Table
LockPermissions: Table
Media: Table
MoveFile: Table
MsiAssembly: Table
MsiAssemblyName: Table
MsiDigitalCertificate: Table
MsiDigitalSignature: Table
MsiFileHash: Table
MsiPatchHeaders: Table
ODBCAttribute: Table
ODBCDriver: Table
ODBCDataSource: Table
ODBCSourceAttribute: Table
ODBCTranslator: Table
Patch: Table
PatchPackage: Table
PublishComponent: Table
RadioButton: Table
Registry: Table
RegLocator: Table
RemoveFile: Table
RemoveIniFile: Table
RemoveRegistry: Table
ReserveCost: Table
SelfReg: Table
ServiceControl: Table
ServiceInstall: Table
Shortcut: Table
Signature: Table
TextStyle: Table
TypeLib: Table
UIText: Table
Upgrade: Table
Verb: Table
tables: List[Table]
_Validation_records: List[
Tuple[str, str, str, Optional[int], Optional[int], Optional[str], Optional[int], Optional[str], Optional[str], str]
]
|
{
"pile_set_name": "Github"
}
|
/* Capsule objects let you wrap a C "void *" pointer in a Python
object. They're a way of passing data through the Python interpreter
without creating your own custom type.
Capsules are used for communication between extension modules.
They provide a way for an extension module to export a C interface
to other extension modules, so that extension modules can use the
Python import mechanism to link to one another.
For more information, please see "c-api/capsule.html" in the
documentation.
*/
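/* A minimal usage sketch (not part of this header; the module and capsule
   names "spam" and "spam._C_API" are hypothetical). An exporting module
   wraps a table of function pointers in a capsule and attaches it to its
   module object; a consuming module retrieves the pointer through the
   import mechanism:

       static void *SpamAPI[1];
       PyObject *c = PyCapsule_New((void *)SpamAPI, "spam._C_API", NULL);
       PyModule_AddObject(module, "_C_API", c);

       void **api = (void **)PyCapsule_Import("spam._C_API", 0);
*/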
#ifndef Py_CAPSULE_H
#define Py_CAPSULE_H
#ifdef __cplusplus
extern "C" {
#endif
PyAPI_DATA(PyTypeObject) PyCapsule_Type;
typedef void (*PyCapsule_Destructor)(PyObject *);
#define PyCapsule_CheckExact(op) (Py_TYPE(op) == &PyCapsule_Type)
PyAPI_FUNC(PyObject *) PyCapsule_New(
void *pointer,
const char *name,
PyCapsule_Destructor destructor);
PyAPI_FUNC(void *) PyCapsule_GetPointer(PyObject *capsule, const char *name);
PyAPI_FUNC(PyCapsule_Destructor) PyCapsule_GetDestructor(PyObject *capsule);
PyAPI_FUNC(const char *) PyCapsule_GetName(PyObject *capsule);
PyAPI_FUNC(void *) PyCapsule_GetContext(PyObject *capsule);
PyAPI_FUNC(int) PyCapsule_IsValid(PyObject *capsule, const char *name);
PyAPI_FUNC(int) PyCapsule_SetPointer(PyObject *capsule, void *pointer);
PyAPI_FUNC(int) PyCapsule_SetDestructor(PyObject *capsule, PyCapsule_Destructor destructor);
PyAPI_FUNC(int) PyCapsule_SetName(PyObject *capsule, const char *name);
PyAPI_FUNC(int) PyCapsule_SetContext(PyObject *capsule, void *context);
PyAPI_FUNC(void *) PyCapsule_Import(const char *name, int no_block);
#ifdef __cplusplus
}
#endif
#endif /* !Py_CAPSULE_H */
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.milton.mini.controllers;
import io.milton.annotations.AccessControlList;
import io.milton.annotations.Authenticate;
import io.milton.annotations.ChildOf;
import io.milton.annotations.ChildrenOf;
import io.milton.annotations.Email;
import io.milton.annotations.Get;
import io.milton.annotations.MakeCollection;
import io.milton.annotations.ModifiedDate;
import io.milton.annotations.Name;
import io.milton.annotations.Post;
import io.milton.annotations.Principal;
import io.milton.annotations.ResourceController;
import io.milton.annotations.Root;
import io.milton.annotations.UniqueId;
import io.milton.annotations.Users;
import io.milton.cloud.common.CurrentDateService;
import io.milton.common.JsonResult;
import io.milton.common.ModelAndView;
import io.milton.config.HttpManagerBuilder;
import io.milton.config.InitListener;
import io.milton.http.HttpManager;
import io.milton.http.http11.auth.DigestResponse;
import io.milton.mini.PasswordManager;
import io.milton.resource.AccessControlledResource;
import io.milton.vfs.data.DataSession;
import io.milton.vfs.db.CalEvent;
import io.milton.vfs.db.Group;
import io.milton.vfs.db.GroupMembership;
import io.milton.vfs.db.GroupRole;
import io.milton.vfs.db.Organisation;
import io.milton.vfs.db.Profile;
import io.milton.vfs.db.Repository;
import io.milton.vfs.db.utils.SessionManager;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import org.hashsplit4j.api.BlobStore;
import org.hashsplit4j.api.HashStore;
import org.hibernate.Session;
import org.hibernate.Transaction;
@ResourceController
public class MiltonMiniController implements InitListener {
private static org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger(MiltonMiniController.class);
public enum Role {
Admin(AccessControlledResource.Priviledge.ALL),
Author(AccessControlledResource.Priviledge.READ, AccessControlledResource.Priviledge.WRITE, AccessControlledResource.Priviledge.UNLOCK),
Viewer(AccessControlledResource.Priviledge.READ),
UserAdmin(AccessControlledResource.Priviledge.READ_ACL, AccessControlledResource.Priviledge.WRITE_ACL);
AccessControlledResource.Priviledge[] privs;
Role(AccessControlledResource.Priviledge... privs) {
this.privs = privs;
}
}
@Inject
private BlobStore blobStore;
@Inject
private HashStore hashStore;
@Inject
private CurrentDateService currentDateService;
@Inject
private PasswordManager passwordManager;
@Inject
private SessionManager sessionManager;
@Override
public void beforeProtocolBuild(HttpManagerBuilder b) {
}
@Root
public MiltonMiniController getRoot() {
return this;
}
@Post
public JsonResult doHomePagePost(MiltonMiniController root) {
return new JsonResult(true);
}
@Name
public String getRootName(MiltonMiniController root) {
return "";
}
@Get
public String showHomePage(MiltonMiniController root) {
return "homePage";
}
@ChildOf
public LoginPage getLoginPage(MiltonMiniController root, String name) {
if (name.equals("login.html")) {
return new LoginPage();
}
return null;
}
@Get
public String showLoginPage(LoginPage p) {
return "login";
}
@ChildOf
public ScratchPage getScratchPage(MiltonMiniController root, String name) {
if (name.equals("scratch.html")) {
return new ScratchPage(name);
}
return null;
}
@Get
public String showScratchPage(ScratchPage p) {
return "scratch";
}
@ChildrenOf
public UsersHome getUsersHome(MiltonMiniController root) {
return new UsersHome();
}
@ChildrenOf
public RepoHome findRepoHome(MiltonMiniController root) {
Organisation org = Organisation.getRootOrg(SessionManager.session());
return new RepoHome("files", org);
}
@ChildrenOf
public List<Repository> findRepositories(RepoHome repoHome) {
return repoHome.org.getRepositories();
}
@ChildOf(pathSuffix = "new")
public Profile createNewProfile(UsersHome usersHome) {
return createNewProfile();
}
public Profile createNewProfile() {
Profile m = new Profile();
m.setCreatedDate(new Date());
m.setModifiedDate(new Date());
return m;
}
@Get(params = {"editMode"})
public ModelAndView showUserEditPage(Profile profile) throws UnsupportedEncodingException {
return new ModelAndView("profile", profile, "profileEditPage");
}
@Get
public ModelAndView showUserPage(Profile profile) throws UnsupportedEncodingException {
return new ModelAndView("profile", profile, "profilePage");
}
@AccessControlList
public Collection<AccessControlledResource.Priviledge> getUserPriviledges(MiltonMiniController target, Profile currentUser) {
return getPriviledges(currentUser);
}
@AccessControlList
public Collection<AccessControlledResource.Priviledge> getUserPriviledges(Profile target, Profile currentUser) {
if (currentUser == null) {
return AccessControlledResource.NONE;
} else {
if (currentUser.getId() == target.getId()) {
return AccessControlledResource.READ_WRITE;
} else {
return getPriviledges(currentUser);
}
}
}
@Post(bindData = true)
public Profile saveProfile(Profile profile) {
profile.setModifiedDate(new Date());
// HACK, add user to admin group until we build groups page
Organisation org = Organisation.getRootOrg(SessionManager.session());
Session session = SessionManager.session();
session.save(profile);
Group g = org.group("admin", session);
if (!g.containsUser(org, org, session)) {
log.info("Add to admin group, hack!");
GroupMembership gm = profile.createGroupMembership(g, org, session);
}
session.flush();
return profile;
}
@Post(params = {"password"})
public Profile changePassword(Profile profile, Map<String, String> params) {
log.info("changePassword: " + profile.getName());
profile.setModifiedDate(new Date());
String pwd = params.get("password");
passwordManager.setPassword(profile, pwd);
SessionManager.session().save(profile);
SessionManager.session().flush();
log.info("changed Password");
return profile;
}
@ChildrenOf
public SharedHome getSharedFoldersHome(MiltonMiniController root) {
Organisation org = Organisation.getRootOrg(SessionManager.session());
return new SharedHome(org);
}
@Get
public String showUsersHome(UsersHome usersHome) {
return "usersHome";
}
@ChildrenOf
public List<Repository> getSharedFolders(SharedHome sharedHome) {
return sharedHome.org.getRepositories();
}
@MakeCollection
public Repository createSharedFolder(SharedHome sharedHome, String newName, @Principal Profile user) {
Repository repo = sharedHome.org.createRepository(newName, user, SessionManager.session());
return repo;
}
@ChildrenOf
@Users
public List<Profile> getUsers(UsersHome usersHome) {
return Profile.findAll(SessionManager.session());
}
@Authenticate
public Boolean checkPasswordBasic(Profile user, String password) {
return passwordManager.verifyPassword(user, password);
}
@Authenticate
public Boolean checkPasswordDigest(Profile user, DigestResponse digest) {
return passwordManager.verifyDigest(digest, user);
}
@Email
public String getUserEmail(Profile profile) {
return profile.getEmail();
}
@UniqueId
public String getUniqueId(DataSession.DataNode m) {
// We'll just lock on the path
String id = buildUniqueId(m);
return id;
}
@ModifiedDate
public Date getModifiedDate(CalEvent m) {
return m.getModifiedDate();
}
private String buildUniqueId(DataSession.DataNode m) {
if (m.getParent() != null) {
return buildUniqueId(m.getParent()) + "/" + m.getName();
} else {
return m.getBranch().getId() + "";
}
}
@Override
public void beforeInit(HttpManagerBuilder b) {
}
@Override
public void afterInit(HttpManagerBuilder b) {
}
/**
* Check the root organisation exists
*
* @param b
* @param m
*/
@Override
public void afterBuild(HttpManagerBuilder b, HttpManager m) {
Session session = sessionManager.open();
Transaction tx = session.beginTransaction();
try {
Organisation rootOrg = Organisation.getRootOrg(session);
if (rootOrg == null) {
log.info("Creating root organisation");
rootOrg = new Organisation();
Date now = currentDateService.getNow();
rootOrg.setCreatedDate(now);
rootOrg.setModifiedDate(now);
rootOrg.setOrgId("root");
session.save(rootOrg);
session.flush();
}
Group adminGroup = rootOrg.group("admin", session);
if (adminGroup == null) {
adminGroup = rootOrg.createGroup("admin");
adminGroup.setRegistrationMode(Group.REGO_MODE_CLOSED);
session.save(adminGroup);
adminGroup.grantRole(Role.Admin.name(), session);
session.flush();
}
Profile admin = Profile.find("admin", session);
if (admin == null) {
admin = createNewProfile();
admin.setName("admin");
admin.setNickName("admin");
session.save(admin);
session.flush();
admin.createGroupMembership(adminGroup, rootOrg, session);
session.flush();
passwordManager.setPassword(admin, "password8");
session.flush();
}
Repository files = rootOrg.repository("files");
if (files == null) {
System.out.println("create directory");
files = rootOrg.createRepository("files", admin, session);
session.save(files);
session.flush();
}
System.out.println("files repo: " + files);
tx.commit();
} catch (Exception ex) {
log.error("EXception creating initial data", ex);
}
}
public class UsersHome {
public String getName() {
return "users";
}
}
public class SharedHome {
private final Organisation org;
public SharedHome(Organisation org) {
this.org = org;
}
public String getName() {
return "shared";
}
public Organisation getOrg() {
return org;
}
}
public class LoginPage {
public String getName() {
return "login.html";
}
}
public class ScratchPage {
private String name;
public ScratchPage(String name) {
this.name = name;
}
public String getName() {
return "name"; // DW
}
}
public class RepoHome {
private final String name;
private final Organisation org;
public RepoHome(String name, Organisation org) {
this.name = name;
this.org = org;
}
public String getName() {
return name;
}
}
public Set<AccessControlledResource.Priviledge> getPriviledges(Profile curUser) {
Set<AccessControlledResource.Priviledge> privs = new HashSet<>();
if (curUser != null) {
if (curUser.getMemberships() != null && !curUser.getMemberships().isEmpty()) {
for (GroupMembership m : curUser.getMemberships()) {
if (log.isTraceEnabled()) {
log.trace("getPriviledges: append privs for group membership: " + m.getGroupEntity().getName());
}
appendPriviledges(m.getGroupEntity(), privs);
}
} else {
log.trace("getPriviledges: user has no group memberships");
}
} else {
log.trace("anonymous request");
}
return privs;
}
private void appendPriviledges(Group g, Set<AccessControlledResource.Priviledge> privs) {
if (g.getGroupRoles() != null) {
for (GroupRole gr : g.getGroupRoles()) {
String roleName = gr.getRoleName();
Role role = Role.valueOf(roleName);
if (role != null) {
privs.addAll(Arrays.asList(role.privs));
} else {
log.warn("Role not found: " + roleName);
}
}
}
}
}
|
{
"pile_set_name": "Github"
}
|
stm8flash
=========
This is free and open-source software distributed under the terms of the GNU General Public License v2.
For years, it was the only program able to communicate through the SWIM interface of ST-LINK programmers under Linux.
Since 2018, OpenOCD also offers the basic functionality, and additionally supports on-target debugging.
As of early 2018, stm8flash has wider device support and better support for memory read/write operations.
Synopsis
--------
```
stm8flash -c <stlink|stlinkv2|espstlink> -p <partname> [-s flash|eeprom|0x8000] [-r|-w|-v] <filename>
```
The supported file types are Intel Hex, Motorola S-Record and Raw Binary. The type is detected by the file extension.
Flash examples:
```nohighlight
./stm8flash -c stlink -p stm8s003f3 -w blinky.bin
./stm8flash -c stlink -p stm8s003f3 -w blinky.ihx
./stm8flash -c stlinkv2 -p stm8s003f3 -w blinky.ihx
./stm8flash -c stlink -p stm8s105c6 -w blinky.bin
./stm8flash -c stlinkv2 -p stm8l150 -w blinky.bin
```
EEPROM examples:
```nohighlight
./stm8flash -c stlinkv2 -p stm8s003f3 -s eeprom -r ee.bin
./stm8flash -c stlinkv2 -p stm8s003f3 -s eeprom -w ee.bin
./stm8flash -c stlinkv2 -p stm8s003f3 -s eeprom -v ee.bin
```
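Read-back and verify examples (illustrative invocations; substitute your programmer and part):
```nohighlight
./stm8flash -c stlinkv2 -p stm8s003f3 -r dump.bin
./stm8flash -c stlinkv2 -p stm8s003f3 -v blinky.ihx
```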
Support table
-------------
* flash/eeprom/opt: ST-LINK/V1
* flash2/eeprom2/opt2: ST-LINK/V2, ST-LINK/V2-1 and STLINK-V3
| MCU | flash | eeprom | opt | flash2 | eeprom2 | opt2 |
|-------------|-------|--------|------|--------|---------|-------|
| stlux385 | ? | ? | ? | ? | ? | ? |
| stlux???a | ? | ? | ? | ok | ok | ? |
| stm8af526? | ? | ? | ? | ? | ? | ? |
| stm8af528? | ? | ? | ? | ok | ? | ? |
| stm8af52a? | ? | ? | ? | ? | ? | ? |
| stm8af6213 | ? | ? | ? | ? | ? | ? |
| stm8af6223 | ? | ? | ? | ? | ? | ? |
| stm8af6223a | ? | ? | ? | ? | ? | ? |
| stm8af6226 | ? | ? | ? | ? | ? | ? |
| stm8af624? | ? | ? | ? | ? | ? | ? |
| stm8af6266 | ? | ? | ? | ? | ? | ? |
| stm8af6268 | ? | ? | ? | ? | ? | ? |
| stm8af6269 | ? | ? | ? | ? | ? | ? |
| stm8af628? | ? | ? | ? | ? | ? | ? |
| stm8af62a? | ? | ? | ? | ? | ? | ? |
| stm8al313? | ? | ? | ? | ? | ? | ? |
| stm8al314? | ? | ? | ? | ? | ? | ? |
| stm8al316? | ? | ? | ? | ? | ? | ? |
| stm8al318? | ? | ? | ? | ? | ? | ? |
| stm8al31e8? | ? | ? | ? | ? | ? | ? |
| stm8al3l4? | ? | ? | ? | ? | ? | ? |
| stm8al3l6? | ? | ? | ? | ok | ok | ok |
| stm8al3l8? | ? | ? | ? | ? | ? | ? |
| stm8al3le8? | ? | ? | ? | ? | ? | ? |
| stm8l001j3 | ? | ? | ? | ? | ? | ? |
| stm8l050j3 | ? | ? | ? | ? | ? | ? |
| stm8l051f3 | ok | ? | ? | ? | ? | ? |
| stm8l052c6 | ok | ? | ? | ? | ? | ? |
| stm8l052r8 | ok | ? | ? | ? | ? | ? |
| stm8l101f1 | ? | no | ? | ? | no | ? |
| stm8l101?2 | ? | no | ? | ? | no | ? |
| stm8l101?3 | ? | no | ? | ok | no | ? |
| stm8l151?2 | ? | ? | ? | ? | ? | ? |
| stm8l151?3 | ? | ? | ? | ? | ? | ? |
| stm8l151?4 | ok | ? | ? | ? | ? | ? |
| stm8l151?6 | ? | ? | ? | ? | ? | ? |
| stm8l151?8 | ? | ? | ? | ? | ? | ? |
| stm8l152?4 | ? | ? | ? | ? | ? | ? |
| stm8l152?6 | ok | FAIL | ? | ok | ok | ? |
| stm8l152?8 | ? | ? | ? | ok | ? | ? |
| stm8l162?8 | ? | ? | ? | ? | ? | ? |
| stm8s001j3 | ? | ? | ? | ok | ok | ? |
| stm8s003?3 | ok | FAIL | ? | ok | ok | ok |
| stm8s005?6 | ok | ? | ok | ok | ok | ok |
| stm8s007c8 | ? | ? | ? | ? | ? | ? |
| stm8s103f2 | ? | ? | ? | ? | ? | ? |
| stm8s103?3 | ok | ? | ? | ok | ? | ? |
| stm8s105?4 | ok | FAIL | ? | ok | ok | ? |
| stm8s105?6 | ok | ? | ? | ok | ? | ? |
| stm8s207c8 | ? | ? | ? | ? | ? | ? |
| stm8s207cb | ? | ? | ? | ? | ? | ? |
| stm8s207k8 | ? | ? | ? | ? | ? | ? |
| stm8s207m8 | ? | ? | ? | ? | ? | ? |
| stm8s207mb | ? | ? | ? | ? | ? | ? |
| stm8s207r8 | ? | ? | ? | ok | ? | ? |
| stm8s207rb | ? | ? | ? | ? | ? | ? |
| stm8s207s8 | ? | ? | ? | ? | ? | ? |
| stm8s207sb | ? | ? | ? | ? | ? | ? |
| stm8s207?6 | ? | ? | ? | ? | ? | ? |
| stm8s208c6 | ? | ? | ? | ok | ? | ? |
| stm8s208r6 | ? | ? | ? | ? | ? | ? |
| stm8s208s6 | ? | ? | ? | ? | ? | ? |
| stm8s208?8 | ? | ? | ? | ? | ? | ? |
| stm8s208?b | ? | ? | ? | ok | ? | ok |
| stm8s903?3 | ? | ? | ? | ok | ok | ok |
| stm8splnb1 | ? | ? | ? | ? | ? | ? |
| stm8tl5??4 | ? | no | ? | ? | no | ? |
| stnrg???a | ? | ? | ? | ok | ok | ? |
Legend:
* `ok` - Fully supported.
* `no` - Not supported.
* `?` - Not tested.
* `FAIL` - Not working. Needs fix.
|
{
"pile_set_name": "Github"
}
|
var Poker = require('./poker');
var POKER_CARDS = Poker.CARDS;
var HIGH_CARD = 1, // single card (danzhang)
PAIR = 2, // pair (duizi)
STRAIGHT = 3, // straight (shunzi)
FLUSH = 4, // flush (tonghua)
STRAIGHT_FLUSH = 5, // straight flush (tonghuashun)
THREE = 6; // three of a kind (baozi)
var JINHUA_PATTERNS = {
0: 'invalid',
1: 'danzhang',
2: 'duizi',
3: 'shunzi',
4: 'tonghua',
5: 'tonghuashun',
6: 'baozi'
};
var Jinhua = {
HIGH_CARD: 1,
PAIR: 2,
STRAIGHT: 3,
FLUSH: 4,
STRAIGHT_FLUSH: 5,
THREE: 6,
PATTERNS: JINHUA_PATTERNS,
};
exports = module.exports = Jinhua;
Jinhua.sort = function(cards) {
if(cards.length != 3) return cards;
Poker.sortByNumber(cards);
var n1 = cards[1] & 0xf, n2 = cards[2] & 0xf;
if(n1 === n2) { // avoid pair at end
cards.push( cards.shift() );
}
return cards;
};
Jinhua.rank = function(cards) {
if(cards.length != 3) return 0;
Jinhua.sort(cards);
var c0 = cards[0] >> 4, c1 = cards[1] >> 4, c2 = cards[2] >> 4;
var n0 = cards[0] & 0xf, n1 = cards[1] & 0xf, n2 = cards[2] & 0xf;
var d0 = n0 - n1, d1 = n1 - n2;
var rank = (n0 << 8) | (n1 << 4) | n2;
var pattern = 0;
if((d0 === 0) && (d1 === 0)) {
pattern = THREE;
} else if((c0 === c1) && (c1 === c2)) {
if((d0 === 1) && (d1 === 1)) {
pattern = STRAIGHT_FLUSH;
} else {
pattern = FLUSH;
}
} else if((d0 === 1) && (d1 === 1)) {
pattern = STRAIGHT;
} else if((d0 === 0) || (d1 === 0)) {
pattern = PAIR;
} else {
pattern = HIGH_CARD;
}
return (pattern << 12) | rank;
};
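// Rank layout sketch: the pattern sits at bit 12 and above, and the three card
// numbers occupy one nibble each below it (highest card first). For example, a
// hypothetical baozi (THREE) of three 9s yields (6 << 12) | 0x999 === 0x6999,
// so any baozi outranks every tonghuashun.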
Jinhua.pattern = function(cards) {
return Jinhua.rank(cards) >> 12;
};
Jinhua.patternString = function(cards) {
return JINHUA_PATTERNS[ Jinhua.rank(cards) >> 12 ];
};
Jinhua.compare = function(a, b) {
return Jinhua.rank(a) - Jinhua.rank(b);
};
Jinhua.view = function(cards) {
var rank = Jinhua.rank(cards);
var pattern = rank >> 12;
var str = Poker.visualize(cards).join(',') + ' -> ' + JINHUA_PATTERNS[ pattern ] + ', rank:' + rank;
console.log( str );
};
|
{
"pile_set_name": "Github"
}
|
echo "Just a dummy test so that we have a test target for //... on certain bazelci platforms with bazel_integration_test"
exit 0
|
{
"pile_set_name": "Github"
}
|
<?php
namespace Illuminate\Broadcasting;
class PresenceChannel extends Channel
{
/**
* Create a new channel instance.
*
* @param string $name
* @return void
*/
public function __construct($name)
{
parent::__construct('presence-'.$name);
}
}
|
{
"pile_set_name": "Github"
}
|
package pact
import (
"strconv"
"strings"
)
// Release version numbers
const (
// ProtocolVersion is the latest protocol version this package supports.
ProtocolVersion = DPOSStartVersion
// DPOSStartVersion is the protocol version which switches to the DPOS protocol.
DPOSStartVersion uint32 = 20000
// EBIP001Version is the protocol version that starts to support the SPV protocol.
EBIP001Version uint32 = 10001
// MaxTxPerBlock is the maximum number of transactions allowed per block.
MaxTxPerBlock = 10000
// MaxBlocksPerMsg is the maximum number of blocks allowed per message.
MaxBlocksPerMsg = 500
// MaxTxPoolSize is the maximum size of txs allowed in the transaction pool.
MaxTxPoolSize = 20000000
)
// MaxBlockSize is the maximum number of bytes allowed per block.
var MaxBlockSize uint32 = 8000000
// ServiceFlag identifies services supported by a peer.
type ServiceFlag uint64
const (
// SFNodeNetwork is a flag used to indicate a peer is a full node.
SFNodeNetwork ServiceFlag = 1 << iota
// SFTxFiltering is a flag used to indicate a peer supports transaction
// filtering.
SFTxFiltering
// SFNodeBloom is a flag used to indicate a peer supports bloom filtering.
SFNodeBloom
)
// Map of service flags back to their constant names for pretty printing.
var sfStrings = map[ServiceFlag]string{
SFNodeNetwork: "SFNodeNetwork",
SFTxFiltering: "SFTxFiltering",
SFNodeBloom: "SFNodeBloom",
}
// orderedSFStrings is an ordered list of service flags from highest to
// lowest.
var orderedSFStrings = []ServiceFlag{
SFNodeNetwork,
SFTxFiltering,
SFNodeBloom,
}
// String returns the ServiceFlag in human-readable form.
func (f ServiceFlag) String() string {
// No flags are set.
if f == 0 {
return "0x0"
}
// Add individual bit flags.
s := ""
for _, flag := range orderedSFStrings {
if f&flag == flag {
s += sfStrings[flag] + "|"
f -= flag
}
}
// Add any remaining flags which aren't accounted for as hex.
s = strings.TrimRight(s, "|")
if f != 0 {
s += "|0x" + strconv.FormatUint(uint64(f), 16)
}
s = strings.TrimLeft(s, "|")
return s
}
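// Example (sketch): combined flags print in the fixed order above, e.g.
//
//	f := SFNodeNetwork | SFNodeBloom
//	fmt.Println(f) // "SFNodeNetwork|SFNodeBloom"
//
// and any unnamed bits are appended as a hex remainder.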
|
{
"pile_set_name": "Github"
}
|
name: continuous-integration
on: [push, pull_request]
jobs:
linux-os:
runs-on: ubuntu-16.04
steps:
- name: Update APT
run: sudo apt-get update
- name: Install Dependencies
run: |
sudo apt-get install --assume-yes build-essential autotools-dev automake libtool pkg-config \
libfreetype6-dev libluajit-5.1-dev libsdl2-dev libopenal-dev \
libogg-dev libvorbis-dev libmodplug-dev libmpg123-dev libtheora-dev
- name: Checkout
uses: actions/checkout@v2
- name: Pre-Configure
run: $PWD/platform/unix/automagic
- name: Configure
run: mkdir build && cd build && ../configure
- name: Build
run: cd build && make -j2
- name: Prepare appimagetool
run: |
cd build &&
wget https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-x86_64.AppImage -O appimagetool &&
chmod +x appimagetool &&
sudo apt install -y appstream
- name: Clone love-appimages
uses: actions/checkout@v2
with:
path: build/love-appimages
repository: pfirsich/love-appimages
- name: Build AppImage
run: |
cd build &&
python3 love-appimages/build.py .. AppDir --builddir build --appimage love.AppImage
- name: Artifact
uses: actions/upload-artifact@v2-preview
with:
name: love.AppImage
path: build/love.AppImage
windows-os:
runs-on: windows-latest
strategy:
matrix:
platform: [Win32, x64]
steps:
- name: Clone Megasource
uses: actions/checkout@v2
with:
path: megasource
repository: love2d/megasource
ref: master
- name: Checkout
uses: actions/checkout@v2
with:
path: megasource/libs/love
- name: Configure
shell: cmd
env:
PLATFORM: ${{ matrix.platform }}
run: cmake -Bbuild -Hmegasource -T v142 -A %PLATFORM% -DCMAKE_INSTALL_PREFIX=%CD%\install
- name: Install
shell: cmd
run: cmake --build build --config Release --target install -j2
- name: Artifact
uses: actions/upload-artifact@v1
with:
name: love-windows-${{ matrix.platform }}
path: install
|
{
"pile_set_name": "Github"
}
|
<?php
/*
* This file is part of Contao.
*
* (c) Leo Feyer
*
* @license LGPL-3.0-or-later
*/
namespace Contao\Model;
use Contao\Database;
use Contao\DcaExtractor;
/**
* The class reads the relation metadata from the DCA and creates the necessary
* JOIN queries to retrieve an object from the database.
*
* @author Leo Feyer <https://github.com/leofeyer>
*/
class QueryBuilder
{
/**
* Build a query based on the given options
*
* @param array $arrOptions The options array
*
* @return string The query string
*/
public static function find(array $arrOptions)
{
$objBase = DcaExtractor::getInstance($arrOptions['table']);
if (!$objBase->hasRelations())
{
$strQuery = "SELECT * FROM " . $arrOptions['table'];
}
else
{
$arrJoins = array();
$arrFields = array($arrOptions['table'] . ".*");
$intCount = 0;
foreach ($objBase->getRelations() as $strKey=>$arrConfig)
{
// Automatically join the single-relation records
if ($arrConfig['load'] == 'eager' || $arrOptions['eager'])
{
if ($arrConfig['type'] == 'hasOne' || $arrConfig['type'] == 'belongsTo')
{
++$intCount;
$objRelated = DcaExtractor::getInstance($arrConfig['table']);
foreach (array_keys($objRelated->getFields()) as $strField)
{
$arrFields[] = 'j' . $intCount . '.' . Database::quoteIdentifier($strField) . ' AS ' . $strKey . '__' . $strField;
}
$arrJoins[] = " LEFT JOIN " . $arrConfig['table'] . " j$intCount ON " . $arrOptions['table'] . "." . Database::quoteIdentifier($strKey) . "=j$intCount." . $arrConfig['field'];
}
}
}
// Generate the query
$strQuery = "SELECT " . implode(', ', $arrFields) . " FROM " . $arrOptions['table'] . implode("", $arrJoins);
}
// Where condition
if (isset($arrOptions['column']))
{
$strQuery .= " WHERE " . (\is_array($arrOptions['column']) ? implode(" AND ", $arrOptions['column']) : $arrOptions['table'] . '.' . Database::quoteIdentifier($arrOptions['column']) . "=?");
}
// Group by
if (isset($arrOptions['group']))
{
trigger_deprecation('contao/core-bundle', '4.4', 'Using the "group" option has been deprecated and will no longer work in Contao 5.0. See https://github.com/contao/contao/issues/1680.');
$strQuery .= " GROUP BY " . $arrOptions['group'];
}
// Having (see #6446)
if (isset($arrOptions['having']))
{
$strQuery .= " HAVING " . $arrOptions['having'];
}
// Order by
if (isset($arrOptions['order']))
{
$strQuery .= " ORDER BY " . $arrOptions['order'];
}
return $strQuery;
}
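/*
 * Usage sketch (illustrative only; the table and column names are hypothetical,
 * and identifier quoting depends on Database::quoteIdentifier()):
 *
 *   $strQuery = QueryBuilder::find(array(
 *       'table'  => 'tl_member',
 *       'column' => 'id',
 *       'order'  => 'tl_member.id'
 *   ));
 *   // roughly: SELECT * FROM tl_member WHERE tl_member.id=? ORDER BY tl_member.id
 */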
/**
* Build a query based on the given options to count the number of records
*
* @param array $arrOptions The options array
*
* @return string The query string
*/
public static function count(array $arrOptions)
{
$strQuery = "SELECT COUNT(*) AS count FROM " . $arrOptions['table'];
if ($arrOptions['column'] !== null)
{
$strQuery .= " WHERE " . (\is_array($arrOptions['column']) ? implode(" AND ", $arrOptions['column']) : $arrOptions['table'] . '.' . Database::quoteIdentifier($arrOptions['column']) . "=?");
}
return $strQuery;
}
}
class_alias(QueryBuilder::class, 'Model\QueryBuilder');
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "p2p/base/stun_port.h"
#include <utility>
#include <vector>
#include "api/transport/stun.h"
#include "p2p/base/connection.h"
#include "p2p/base/p2p_constants.h"
#include "p2p/base/port_allocator.h"
#include "rtc_base/checks.h"
#include "rtc_base/helpers.h"
#include "rtc_base/ip_address.h"
#include "rtc_base/logging.h"
#include "rtc_base/net_helpers.h"
#include "rtc_base/strings/string_builder.h"
namespace cricket {
// TODO(?): Move these to a common place (used in relayport too)
const int RETRY_TIMEOUT = 50 * 1000; // 50 seconds
// Stop logging errors in UDPPort::SendTo after we have logged
// |kSendErrorLogLimit| messages. Start again after a successful send.
const int kSendErrorLogLimit = 5;
// Handles a binding request sent to the STUN server.
class StunBindingRequest : public StunRequest {
public:
StunBindingRequest(UDPPort* port,
const rtc::SocketAddress& addr,
int64_t start_time)
: port_(port), server_addr_(addr), start_time_(start_time) {}
const rtc::SocketAddress& server_addr() const { return server_addr_; }
void Prepare(StunMessage* request) override {
request->SetType(STUN_BINDING_REQUEST);
}
void OnResponse(StunMessage* response) override {
const StunAddressAttribute* addr_attr =
response->GetAddress(STUN_ATTR_MAPPED_ADDRESS);
if (!addr_attr) {
RTC_LOG(LS_ERROR) << "Binding response missing mapped address.";
} else if (addr_attr->family() != STUN_ADDRESS_IPV4 &&
addr_attr->family() != STUN_ADDRESS_IPV6) {
RTC_LOG(LS_ERROR) << "Binding address has bad family";
} else {
rtc::SocketAddress addr(addr_attr->ipaddr(), addr_attr->port());
port_->OnStunBindingRequestSucceeded(this->Elapsed(), server_addr_, addr);
}
// The keep-alive requests will be stopped after their lifetime has passed.
if (WithinLifetime(rtc::TimeMillis())) {
port_->requests_.SendDelayed(
new StunBindingRequest(port_, server_addr_, start_time_),
port_->stun_keepalive_delay());
}
}
void OnErrorResponse(StunMessage* response) override {
const StunErrorCodeAttribute* attr = response->GetErrorCode();
if (!attr) {
RTC_LOG(LS_ERROR) << "Missing binding response error code.";
} else {
RTC_LOG(LS_ERROR) << "Binding error response:"
" class="
<< attr->eclass() << " number=" << attr->number()
<< " reason=" << attr->reason();
}
port_->OnStunBindingOrResolveRequestFailed(
server_addr_, attr ? attr->number() : STUN_ERROR_GLOBAL_FAILURE,
attr ? attr->reason()
: "STUN binding response with no error code attribute.");
int64_t now = rtc::TimeMillis();
if (WithinLifetime(now) &&
rtc::TimeDiff(now, start_time_) < RETRY_TIMEOUT) {
port_->requests_.SendDelayed(
new StunBindingRequest(port_, server_addr_, start_time_),
port_->stun_keepalive_delay());
}
}
void OnTimeout() override {
RTC_LOG(LS_ERROR) << "Binding request timed out from "
<< port_->GetLocalAddress().ToSensitiveString() << " ("
<< port_->Network()->name() << ")";
port_->OnStunBindingOrResolveRequestFailed(
server_addr_, SERVER_NOT_REACHABLE_ERROR,
"STUN allocate request timed out.");
}
private:
// Returns true if |now| is within the lifetime of the request (a negative
// lifetime means infinite).
bool WithinLifetime(int64_t now) const {
int lifetime = port_->stun_keepalive_lifetime();
return lifetime < 0 || rtc::TimeDiff(now, start_time_) <= lifetime;
}
UDPPort* port_;
const rtc::SocketAddress server_addr_;
int64_t start_time_;
};
UDPPort::AddressResolver::AddressResolver(rtc::PacketSocketFactory* factory)
: socket_factory_(factory) {}
UDPPort::AddressResolver::~AddressResolver() {
for (ResolverMap::iterator it = resolvers_.begin(); it != resolvers_.end();
++it) {
// TODO(guoweis): Change to asynchronous DNS resolution to prevent the hang
// when passing true to Destroy(), which is the safer way to avoid the code
// being unloaded before the thread exits. Please see webrtc bug 5139.
it->second->Destroy(false);
}
}
void UDPPort::AddressResolver::Resolve(const rtc::SocketAddress& address) {
if (resolvers_.find(address) != resolvers_.end())
return;
rtc::AsyncResolverInterface* resolver =
socket_factory_->CreateAsyncResolver();
resolvers_.insert(std::pair<rtc::SocketAddress, rtc::AsyncResolverInterface*>(
address, resolver));
resolver->SignalDone.connect(this,
&UDPPort::AddressResolver::OnResolveResult);
resolver->Start(address);
}
bool UDPPort::AddressResolver::GetResolvedAddress(
const rtc::SocketAddress& input,
int family,
rtc::SocketAddress* output) const {
ResolverMap::const_iterator it = resolvers_.find(input);
if (it == resolvers_.end())
return false;
return it->second->GetResolvedAddress(family, output);
}
void UDPPort::AddressResolver::OnResolveResult(
rtc::AsyncResolverInterface* resolver) {
for (ResolverMap::iterator it = resolvers_.begin(); it != resolvers_.end();
++it) {
if (it->second == resolver) {
SignalDone(it->first, resolver->GetError());
return;
}
}
}
UDPPort::UDPPort(rtc::Thread* thread,
rtc::PacketSocketFactory* factory,
rtc::Network* network,
rtc::AsyncPacketSocket* socket,
const std::string& username,
const std::string& password,
const std::string& origin,
bool emit_local_for_anyaddress)
: Port(thread, LOCAL_PORT_TYPE, factory, network, username, password),
requests_(thread),
socket_(socket),
error_(0),
ready_(false),
stun_keepalive_delay_(STUN_KEEPALIVE_INTERVAL),
dscp_(rtc::DSCP_NO_CHANGE),
emit_local_for_anyaddress_(emit_local_for_anyaddress) {
requests_.set_origin(origin);
}
UDPPort::UDPPort(rtc::Thread* thread,
rtc::PacketSocketFactory* factory,
rtc::Network* network,
uint16_t min_port,
uint16_t max_port,
const std::string& username,
const std::string& password,
const std::string& origin,
bool emit_local_for_anyaddress)
: Port(thread,
LOCAL_PORT_TYPE,
factory,
network,
min_port,
max_port,
username,
password),
requests_(thread),
socket_(nullptr),
error_(0),
ready_(false),
stun_keepalive_delay_(STUN_KEEPALIVE_INTERVAL),
dscp_(rtc::DSCP_NO_CHANGE),
emit_local_for_anyaddress_(emit_local_for_anyaddress) {
requests_.set_origin(origin);
}
bool UDPPort::Init() {
stun_keepalive_lifetime_ = GetStunKeepaliveLifetime();
if (!SharedSocket()) {
RTC_DCHECK(socket_ == nullptr);
socket_ = socket_factory()->CreateUdpSocket(
rtc::SocketAddress(Network()->GetBestIP(), 0), min_port(), max_port());
if (!socket_) {
RTC_LOG(LS_WARNING) << ToString() << ": UDP socket creation failed";
return false;
}
socket_->SignalReadPacket.connect(this, &UDPPort::OnReadPacket);
}
socket_->SignalSentPacket.connect(this, &UDPPort::OnSentPacket);
socket_->SignalReadyToSend.connect(this, &UDPPort::OnReadyToSend);
socket_->SignalAddressReady.connect(this, &UDPPort::OnLocalAddressReady);
requests_.SignalSendPacket.connect(this, &UDPPort::OnSendPacket);
return true;
}
UDPPort::~UDPPort() {
if (!SharedSocket())
delete socket_;
}
void UDPPort::PrepareAddress() {
RTC_DCHECK(requests_.empty());
if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) {
OnLocalAddressReady(socket_, socket_->GetLocalAddress());
}
}
void UDPPort::MaybePrepareStunCandidate() {
  // Send a binding request to the STUN server, if its address is available,
  // in order to prepare the STUN candidate.
if (!server_addresses_.empty()) {
SendStunBindingRequests();
} else {
// Port is done allocating candidates.
MaybeSetPortCompleteOrError();
}
}
Connection* UDPPort::CreateConnection(const Candidate& address,
CandidateOrigin origin) {
if (!SupportsProtocol(address.protocol())) {
return nullptr;
}
if (!IsCompatibleAddress(address.address())) {
return nullptr;
}
  // In addition to DCHECK-ing the non-emptiness of local candidates, we also
  // return null to skip this Port if latent bugs violate that invariant;
  // otherwise accessing the local candidate of the connection created below
  // would lead to a crash.
if (Candidates().empty()) {
RTC_NOTREACHED();
return nullptr;
}
// When the socket is shared, the srflx candidate is gathered by the UDPPort.
// The assumption here is that
// 1) if the IP concealment with mDNS is not enabled, the gathering of the
// host candidate of this port (which is synchronous),
// 2) or otherwise if enabled, the start of name registration of the host
// candidate (as the start of asynchronous gathering)
// is always before the gathering of a srflx candidate (and any prflx
// candidate).
//
// See also the definition of MdnsNameRegistrationStatus::kNotStarted in
// port.h.
RTC_DCHECK(!SharedSocket() || Candidates()[0].type() == LOCAL_PORT_TYPE ||
mdns_name_registration_status() !=
MdnsNameRegistrationStatus::kNotStarted);
Connection* conn = new ProxyConnection(this, 0, address);
AddOrReplaceConnection(conn);
return conn;
}
int UDPPort::SendTo(const void* data,
size_t size,
const rtc::SocketAddress& addr,
const rtc::PacketOptions& options,
bool payload) {
rtc::PacketOptions modified_options(options);
CopyPortInformationToPacketInfo(&modified_options.info_signaled_after_sent);
int sent = socket_->SendTo(data, size, addr, modified_options);
if (sent < 0) {
error_ = socket_->GetError();
// Rate limiting added for crbug.com/856088.
// TODO(webrtc:9622): Use general rate limiting mechanism once it exists.
if (send_error_count_ < kSendErrorLogLimit) {
++send_error_count_;
RTC_LOG(LS_ERROR) << ToString() << ": UDP send of " << size
<< " bytes failed with error " << error_;
}
} else {
send_error_count_ = 0;
}
return sent;
}
void UDPPort::UpdateNetworkCost() {
Port::UpdateNetworkCost();
stun_keepalive_lifetime_ = GetStunKeepaliveLifetime();
}
rtc::DiffServCodePoint UDPPort::StunDscpValue() const {
return dscp_;
}
int UDPPort::SetOption(rtc::Socket::Option opt, int value) {
if (opt == rtc::Socket::OPT_DSCP) {
// Save value for future packets we instantiate.
dscp_ = static_cast<rtc::DiffServCodePoint>(value);
}
return socket_->SetOption(opt, value);
}
int UDPPort::GetOption(rtc::Socket::Option opt, int* value) {
return socket_->GetOption(opt, value);
}
int UDPPort::GetError() {
return error_;
}
bool UDPPort::HandleIncomingPacket(rtc::AsyncPacketSocket* socket,
const char* data,
size_t size,
const rtc::SocketAddress& remote_addr,
int64_t packet_time_us) {
// All packets given to UDP port will be consumed.
OnReadPacket(socket, data, size, remote_addr, packet_time_us);
return true;
}
bool UDPPort::SupportsProtocol(const std::string& protocol) const {
return protocol == UDP_PROTOCOL_NAME;
}
ProtocolType UDPPort::GetProtocol() const {
return PROTO_UDP;
}
void UDPPort::GetStunStats(absl::optional<StunStats>* stats) {
*stats = stats_;
}
void UDPPort::set_stun_keepalive_delay(const absl::optional<int>& delay) {
stun_keepalive_delay_ = delay.value_or(STUN_KEEPALIVE_INTERVAL);
}
void UDPPort::OnLocalAddressReady(rtc::AsyncPacketSocket* socket,
const rtc::SocketAddress& address) {
// When adapter enumeration is disabled and binding to the any address, the
// default local address will be issued as a candidate instead if
// |emit_local_for_anyaddress| is true. This is to allow connectivity for
  // applications which absolutely require a HOST candidate.
rtc::SocketAddress addr = address;
// If MaybeSetDefaultLocalAddress fails, we keep the "any" IP so that at
// least the port is listening.
MaybeSetDefaultLocalAddress(&addr);
AddAddress(addr, addr, rtc::SocketAddress(), UDP_PROTOCOL_NAME, "", "",
LOCAL_PORT_TYPE, ICE_TYPE_PREFERENCE_HOST, 0, "", false);
MaybePrepareStunCandidate();
}
void UDPPort::PostAddAddress(bool is_final) {
MaybeSetPortCompleteOrError();
}
void UDPPort::OnReadPacket(rtc::AsyncPacketSocket* socket,
const char* data,
size_t size,
const rtc::SocketAddress& remote_addr,
const int64_t& packet_time_us) {
RTC_DCHECK(socket == socket_);
RTC_DCHECK(!remote_addr.IsUnresolvedIP());
// Look for a response from the STUN server.
// Even if the response doesn't match one of our outstanding requests, we
// will eat it because it might be a response to a retransmitted packet, and
// we already cleared the request when we got the first response.
if (server_addresses_.find(remote_addr) != server_addresses_.end()) {
requests_.CheckResponse(data, size);
return;
}
if (Connection* conn = GetConnection(remote_addr)) {
conn->OnReadPacket(data, size, packet_time_us);
} else {
Port::OnReadPacket(data, size, remote_addr, PROTO_UDP);
}
}
void UDPPort::OnSentPacket(rtc::AsyncPacketSocket* socket,
const rtc::SentPacket& sent_packet) {
PortInterface::SignalSentPacket(sent_packet);
}
void UDPPort::OnReadyToSend(rtc::AsyncPacketSocket* socket) {
Port::OnReadyToSend();
}
void UDPPort::SendStunBindingRequests() {
// We will keep pinging the stun server to make sure our NAT pin-hole stays
// open until the deadline (specified in SendStunBindingRequest).
RTC_DCHECK(requests_.empty());
for (ServerAddresses::const_iterator it = server_addresses_.begin();
it != server_addresses_.end(); ++it) {
SendStunBindingRequest(*it);
}
}
void UDPPort::ResolveStunAddress(const rtc::SocketAddress& stun_addr) {
if (!resolver_) {
resolver_.reset(new AddressResolver(socket_factory()));
resolver_->SignalDone.connect(this, &UDPPort::OnResolveResult);
}
RTC_LOG(LS_INFO) << ToString() << ": Starting STUN host lookup for "
<< stun_addr.ToSensitiveString();
resolver_->Resolve(stun_addr);
}
void UDPPort::OnResolveResult(const rtc::SocketAddress& input, int error) {
RTC_DCHECK(resolver_.get() != nullptr);
rtc::SocketAddress resolved;
if (error != 0 || !resolver_->GetResolvedAddress(
input, Network()->GetBestIP().family(), &resolved)) {
RTC_LOG(LS_WARNING) << ToString()
<< ": StunPort: stun host lookup received error "
<< error;
OnStunBindingOrResolveRequestFailed(input, SERVER_NOT_REACHABLE_ERROR,
"STUN host lookup received error.");
return;
}
server_addresses_.erase(input);
if (server_addresses_.find(resolved) == server_addresses_.end()) {
server_addresses_.insert(resolved);
SendStunBindingRequest(resolved);
}
}
void UDPPort::SendStunBindingRequest(const rtc::SocketAddress& stun_addr) {
if (stun_addr.IsUnresolvedIP()) {
ResolveStunAddress(stun_addr);
} else if (socket_->GetState() == rtc::AsyncPacketSocket::STATE_BOUND) {
// Check if |server_addr_| is compatible with the port's ip.
if (IsCompatibleAddress(stun_addr)) {
requests_.Send(
new StunBindingRequest(this, stun_addr, rtc::TimeMillis()));
} else {
// Since we can't send stun messages to the server, we should mark this
// port ready.
const char* reason = "STUN server address is incompatible.";
RTC_LOG(LS_WARNING) << reason;
OnStunBindingOrResolveRequestFailed(stun_addr, SERVER_NOT_REACHABLE_ERROR,
reason);
}
}
}
bool UDPPort::MaybeSetDefaultLocalAddress(rtc::SocketAddress* addr) const {
if (!addr->IsAnyIP() || !emit_local_for_anyaddress_ ||
!Network()->default_local_address_provider()) {
return true;
}
rtc::IPAddress default_address;
bool result =
Network()->default_local_address_provider()->GetDefaultLocalAddress(
addr->family(), &default_address);
if (!result || default_address.IsNil()) {
return false;
}
addr->SetIP(default_address);
return true;
}
void UDPPort::OnStunBindingRequestSucceeded(
int rtt_ms,
const rtc::SocketAddress& stun_server_addr,
const rtc::SocketAddress& stun_reflected_addr) {
RTC_DCHECK(stats_.stun_binding_responses_received <
stats_.stun_binding_requests_sent);
stats_.stun_binding_responses_received++;
stats_.stun_binding_rtt_ms_total += rtt_ms;
stats_.stun_binding_rtt_ms_squared_total += rtt_ms * rtt_ms;
if (bind_request_succeeded_servers_.find(stun_server_addr) !=
bind_request_succeeded_servers_.end()) {
return;
}
bind_request_succeeded_servers_.insert(stun_server_addr);
  // If the socket is shared and |stun_reflected_addr| equals the local socket
  // address, or if the same address has already been added by another STUN
  // server, then discard the STUN address.
// For STUN, related address is the local socket address.
if ((!SharedSocket() || stun_reflected_addr != socket_->GetLocalAddress()) &&
!HasCandidateWithAddress(stun_reflected_addr)) {
rtc::SocketAddress related_address = socket_->GetLocalAddress();
// If we can't stamp the related address correctly, empty it to avoid leak.
if (!MaybeSetDefaultLocalAddress(&related_address)) {
related_address =
rtc::EmptySocketAddressWithFamily(related_address.family());
}
rtc::StringBuilder url;
url << "stun:" << stun_server_addr.ipaddr().ToString() << ":"
<< stun_server_addr.port();
AddAddress(stun_reflected_addr, socket_->GetLocalAddress(), related_address,
UDP_PROTOCOL_NAME, "", "", STUN_PORT_TYPE,
ICE_TYPE_PREFERENCE_SRFLX, 0, url.str(), false);
}
MaybeSetPortCompleteOrError();
}
void UDPPort::OnStunBindingOrResolveRequestFailed(
const rtc::SocketAddress& stun_server_addr,
int error_code,
const std::string& reason) {
rtc::StringBuilder url;
url << "stun:" << stun_server_addr.ToString();
SignalCandidateError(
this, IceCandidateErrorEvent(GetLocalAddress().HostAsSensitiveURIString(),
GetLocalAddress().port(), url.str(),
error_code, reason));
if (bind_request_failed_servers_.find(stun_server_addr) !=
bind_request_failed_servers_.end()) {
return;
}
bind_request_failed_servers_.insert(stun_server_addr);
MaybeSetPortCompleteOrError();
}
void UDPPort::MaybeSetPortCompleteOrError() {
if (mdns_name_registration_status() ==
MdnsNameRegistrationStatus::kInProgress) {
return;
}
if (ready_) {
return;
}
// Do not set port ready if we are still waiting for bind responses.
const size_t servers_done_bind_request =
bind_request_failed_servers_.size() +
bind_request_succeeded_servers_.size();
if (server_addresses_.size() != servers_done_bind_request) {
return;
}
// Setting ready status.
ready_ = true;
// The port is "completed" if there is no stun server provided, or the bind
// request succeeded for any stun server, or the socket is shared.
if (server_addresses_.empty() || bind_request_succeeded_servers_.size() > 0 ||
SharedSocket()) {
SignalPortComplete(this);
} else {
SignalPortError(this);
}
}
// TODO(?): merge this with SendTo above.
void UDPPort::OnSendPacket(const void* data, size_t size, StunRequest* req) {
StunBindingRequest* sreq = static_cast<StunBindingRequest*>(req);
rtc::PacketOptions options(StunDscpValue());
options.info_signaled_after_sent.packet_type = rtc::PacketType::kStunMessage;
CopyPortInformationToPacketInfo(&options.info_signaled_after_sent);
if (socket_->SendTo(data, size, sreq->server_addr(), options) < 0) {
RTC_LOG_ERR_EX(LERROR, socket_->GetError()) << "sendto";
}
stats_.stun_binding_requests_sent++;
}
bool UDPPort::HasCandidateWithAddress(const rtc::SocketAddress& addr) const {
const std::vector<Candidate>& existing_candidates = Candidates();
std::vector<Candidate>::const_iterator it = existing_candidates.begin();
for (; it != existing_candidates.end(); ++it) {
if (it->address() == addr)
return true;
}
return false;
}
std::unique_ptr<StunPort> StunPort::Create(
rtc::Thread* thread,
rtc::PacketSocketFactory* factory,
rtc::Network* network,
uint16_t min_port,
uint16_t max_port,
const std::string& username,
const std::string& password,
const ServerAddresses& servers,
const std::string& origin,
absl::optional<int> stun_keepalive_interval) {
// Using `new` to access a non-public constructor.
auto port = absl::WrapUnique(new StunPort(thread, factory, network, min_port,
max_port, username, password,
servers, origin));
port->set_stun_keepalive_delay(stun_keepalive_interval);
if (!port->Init()) {
return nullptr;
}
return port;
}
StunPort::StunPort(rtc::Thread* thread,
rtc::PacketSocketFactory* factory,
rtc::Network* network,
uint16_t min_port,
uint16_t max_port,
const std::string& username,
const std::string& password,
const ServerAddresses& servers,
const std::string& origin)
: UDPPort(thread,
factory,
network,
min_port,
max_port,
username,
password,
origin,
false) {
  // UDPPort will have set the type to local UDP; update it here to STUN.
set_type(STUN_PORT_TYPE);
set_server_addresses(servers);
}
void StunPort::PrepareAddress() {
SendStunBindingRequests();
}
} // namespace cricket
|
{
"pile_set_name": "Github"
}
|
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Routing\Loader;
use Symfony\Component\Routing\RouteCollection;
use Symfony\Component\Routing\Route;
use Symfony\Component\Config\Resource\FileResource;
use Symfony\Component\Yaml\Exception\ParseException;
use Symfony\Component\Yaml\Parser as YamlParser;
use Symfony\Component\Config\Loader\FileLoader;
use Symfony\Component\Yaml\Yaml;
/**
* YamlFileLoader loads Yaml routing files.
*
* @author Fabien Potencier <[email protected]>
* @author Tobias Schultze <http://tobion.de>
*/
class YamlFileLoader extends FileLoader
{
private static $availableKeys = array(
'resource', 'type', 'prefix', 'path', 'host', 'schemes', 'methods', 'defaults', 'requirements', 'options', 'condition',
);
private $yamlParser;
/**
* Loads a Yaml file.
*
* @param string $file A Yaml file path
* @param string|null $type The resource type
*
* @return RouteCollection A RouteCollection instance
*
* @throws \InvalidArgumentException When a route can't be parsed because YAML is invalid
*/
public function load($file, $type = null)
{
$path = $this->locator->locate($file);
if (!stream_is_local($path)) {
throw new \InvalidArgumentException(sprintf('This is not a local file "%s".', $path));
}
if (!file_exists($path)) {
throw new \InvalidArgumentException(sprintf('File "%s" not found.', $path));
}
if (null === $this->yamlParser) {
$this->yamlParser = new YamlParser();
}
try {
$parsedConfig = $this->yamlParser->parse(file_get_contents($path), Yaml::PARSE_KEYS_AS_STRINGS);
} catch (ParseException $e) {
throw new \InvalidArgumentException(sprintf('The file "%s" does not contain valid YAML.', $path), 0, $e);
}
$collection = new RouteCollection();
$collection->addResource(new FileResource($path));
// empty file
if (null === $parsedConfig) {
return $collection;
}
// not an array
if (!is_array($parsedConfig)) {
throw new \InvalidArgumentException(sprintf('The file "%s" must contain a YAML array.', $path));
}
foreach ($parsedConfig as $name => $config) {
$this->validate($config, $name, $path);
if (isset($config['resource'])) {
$this->parseImport($collection, $config, $path, $file);
} else {
$this->parseRoute($collection, $name, $config, $path);
}
}
return $collection;
}
/**
* {@inheritdoc}
*/
public function supports($resource, $type = null)
{
return is_string($resource) && in_array(pathinfo($resource, PATHINFO_EXTENSION), array('yml', 'yaml'), true) && (!$type || 'yaml' === $type);
}
/**
* Parses a route and adds it to the RouteCollection.
*
* @param RouteCollection $collection A RouteCollection instance
* @param string $name Route name
* @param array $config Route definition
* @param string $path Full path of the YAML file being processed
*/
protected function parseRoute(RouteCollection $collection, $name, array $config, $path)
{
$defaults = isset($config['defaults']) ? $config['defaults'] : array();
$requirements = isset($config['requirements']) ? $config['requirements'] : array();
$options = isset($config['options']) ? $config['options'] : array();
$host = isset($config['host']) ? $config['host'] : '';
$schemes = isset($config['schemes']) ? $config['schemes'] : array();
$methods = isset($config['methods']) ? $config['methods'] : array();
$condition = isset($config['condition']) ? $config['condition'] : null;
$route = new Route($config['path'], $defaults, $requirements, $options, $host, $schemes, $methods, $condition);
$collection->add($name, $route);
}
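    /*
     * For illustration, a minimal route definition that this method accepts
     * (the route name, path and controller below are hypothetical, not part
     * of this file):
     *
     *     blog_show:
     *         path:     /blog/{slug}
     *         defaults: { _controller: 'AppBundle:Blog:show' }
     *         methods:  [GET]
     *
     * Only the keys listed in self::$availableKeys are allowed, and "path" is
     * the single required key for a plain (non-import) route.
     */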
/**
* Parses an import and adds the routes in the resource to the RouteCollection.
*
* @param RouteCollection $collection A RouteCollection instance
* @param array $config Route definition
* @param string $path Full path of the YAML file being processed
* @param string $file Loaded file name
*/
protected function parseImport(RouteCollection $collection, array $config, $path, $file)
{
$type = isset($config['type']) ? $config['type'] : null;
$prefix = isset($config['prefix']) ? $config['prefix'] : '';
$defaults = isset($config['defaults']) ? $config['defaults'] : array();
$requirements = isset($config['requirements']) ? $config['requirements'] : array();
$options = isset($config['options']) ? $config['options'] : array();
$host = isset($config['host']) ? $config['host'] : null;
$condition = isset($config['condition']) ? $config['condition'] : null;
$schemes = isset($config['schemes']) ? $config['schemes'] : null;
$methods = isset($config['methods']) ? $config['methods'] : null;
$this->setCurrentDir(dirname($path));
$subCollection = $this->import($config['resource'], $type, false, $file);
/* @var $subCollection RouteCollection */
$subCollection->addPrefix($prefix);
if (null !== $host) {
$subCollection->setHost($host);
}
if (null !== $condition) {
$subCollection->setCondition($condition);
}
if (null !== $schemes) {
$subCollection->setSchemes($schemes);
}
if (null !== $methods) {
$subCollection->setMethods($methods);
}
$subCollection->addDefaults($defaults);
$subCollection->addRequirements($requirements);
$subCollection->addOptions($options);
$collection->addCollection($subCollection);
}
/**
* Validates the route configuration.
*
* @param array $config A resource config
* @param string $name The config key
* @param string $path The loaded file path
*
* @throws \InvalidArgumentException If one of the provided config keys is not supported,
* something is missing or the combination is nonsense
*/
protected function validate($config, $name, $path)
{
if (!is_array($config)) {
throw new \InvalidArgumentException(sprintf('The definition of "%s" in "%s" must be a YAML array.', $name, $path));
}
if ($extraKeys = array_diff(array_keys($config), self::$availableKeys)) {
throw new \InvalidArgumentException(sprintf(
'The routing file "%s" contains unsupported keys for "%s": "%s". Expected one of: "%s".',
$path, $name, implode('", "', $extraKeys), implode('", "', self::$availableKeys)
));
}
if (isset($config['resource']) && isset($config['path'])) {
throw new \InvalidArgumentException(sprintf(
'The routing file "%s" must not specify both the "resource" key and the "path" key for "%s". Choose between an import and a route definition.',
$path, $name
));
}
if (!isset($config['resource']) && isset($config['type'])) {
throw new \InvalidArgumentException(sprintf(
'The "type" key for the route definition "%s" in "%s" is unsupported. It is only available for imports in combination with the "resource" key.',
$name, $path
));
}
if (!isset($config['resource']) && !isset($config['path'])) {
throw new \InvalidArgumentException(sprintf(
'You must define a "path" for the route "%s" in file "%s".',
$name, $path
));
}
}
}
|
{
"pile_set_name": "Github"
}
|
0 0.0
2 2.0
4 4.0
5 15.0
8 8.0
9 9.0
10 10.0
11 11.0
12 24.0
15 30.0
|
{
"pile_set_name": "Github"
}
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Estimator regression tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import tensorflow as tf
from tensorflow.contrib.learn.python.learn.learn_io import data_feeder
def _get_input_fn(x, y, batch_size=None):
df = data_feeder.setup_train_data_feeder(
x, y, n_classes=None, batch_size=batch_size)
return df.input_builder, df.get_feed_dict_fn()
# We use a null optimizer since we can't get deterministic results out of
# supervisor's multiple threads.
class _NullOptimizer(tf.train.Optimizer):
def __init__(self):
super(_NullOptimizer, self).__init__(use_locking=False, name='Null')
def _apply_dense(self, grad, var):
return tf.no_op()
def _apply_sparse(self, grad, var):
return tf.no_op()
def _prepare(self):
pass
_NULL_OPTIMIZER = _NullOptimizer()
class StabilityTest(tf.test.TestCase):
"""Tests that estiamtors are reproducible."""
def testRandomStability(self):
my_seed = 42
minval = -0.3333
maxval = 0.3333
with tf.Graph().as_default() as g:
with self.test_session(graph=g) as session:
g.seed = my_seed
x = tf.random_uniform([10, 10], minval=minval, maxval=maxval)
val1 = session.run(x)
with tf.Graph().as_default() as g:
with self.test_session(graph=g) as session:
g.seed = my_seed
x = tf.random_uniform([10, 10], minval=minval, maxval=maxval)
val2 = session.run(x)
self.assertAllClose(val1, val2)
def testLinearRegression(self):
my_seed = 42
config = tf.contrib.learn.RunConfig(tf_random_seed=my_seed)
boston = tf.contrib.learn.datasets.load_boston()
columns = [tf.contrib.layers.real_valued_column('', dimension=13)]
    # We train two identical regressors with the same seed and expect the
    # same weights, biases and predictions.
with tf.Graph().as_default() as g1:
random.seed(my_seed)
g1.seed = my_seed
tf.contrib.framework.create_global_step()
regressor1 = tf.contrib.learn.LinearRegressor(optimizer=_NULL_OPTIMIZER,
feature_columns=columns,
config=config)
regressor1.fit(x=boston.data, y=boston.target, steps=1)
with tf.Graph().as_default() as g2:
random.seed(my_seed)
g2.seed = my_seed
tf.contrib.framework.create_global_step()
regressor2 = tf.contrib.learn.LinearRegressor(optimizer=_NULL_OPTIMIZER,
feature_columns=columns,
config=config)
regressor2.fit(x=boston.data, y=boston.target, steps=1)
self.assertAllClose(regressor1.weights_, regressor2.weights_)
self.assertAllClose(regressor1.bias_, regressor2.bias_)
self.assertAllClose(
list(regressor1.predict(boston.data, as_iterable=True)),
list(regressor2.predict(boston.data, as_iterable=True)), atol=1e-05)
def testDNNRegression(self):
my_seed = 42
config = tf.contrib.learn.RunConfig(tf_random_seed=my_seed)
boston = tf.contrib.learn.datasets.load_boston()
columns = [tf.contrib.layers.real_valued_column('', dimension=13)]
with tf.Graph().as_default() as g1:
random.seed(my_seed)
g1.seed = my_seed
tf.contrib.framework.create_global_step()
regressor1 = tf.contrib.learn.DNNRegressor(
hidden_units=[10], feature_columns=columns,
optimizer=_NULL_OPTIMIZER, config=config)
regressor1.fit(x=boston.data, y=boston.target, steps=1)
with tf.Graph().as_default() as g2:
random.seed(my_seed)
g2.seed = my_seed
tf.contrib.framework.create_global_step()
regressor2 = tf.contrib.learn.DNNRegressor(
hidden_units=[10], feature_columns=columns,
optimizer=_NULL_OPTIMIZER, config=config)
regressor2.fit(x=boston.data, y=boston.target, steps=1)
for w1, w2 in zip(regressor1.weights_, regressor2.weights_):
self.assertAllClose(w1, w2)
    for b1, b2 in zip(regressor1.bias_, regressor2.bias_):
self.assertAllClose(b1, b2)
self.assertAllClose(
list(regressor1.predict(boston.data, as_iterable=True)),
list(regressor2.predict(boston.data, as_iterable=True)), atol=1e-05)
if __name__ == '__main__':
tf.test.main()
|
{
"pile_set_name": "Github"
}
|
/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {CommonSignals} from './common-signals';
import {Services} from './services';
import {createElementWithAttributes, removeElement} from './dom';
import {devAssert} from './log';
import {dict} from './utils/object';
import {isArray, toWin} from './types';
import {triggerAnalyticsEvent} from './analytics';
/**
* Method to create scoped analytics element for any element.
* TODO: Make this function private
* @param {!Element} parentElement
* @param {!JsonObject} config
* @param {boolean=} loadAnalytics
* @param {boolean=} disableImmediate
* @return {!Element} created analytics element
*/
export function insertAnalyticsElement(
parentElement,
config,
loadAnalytics = false,
disableImmediate = false
) {
const doc = /** @type {!Document} */ (parentElement.ownerDocument);
const analyticsElem = createElementWithAttributes(
doc,
'amp-analytics',
dict({
'sandbox': 'true',
'trigger': disableImmediate ? '' : 'immediate',
})
);
const scriptElem = createElementWithAttributes(
doc,
'script',
dict({
'type': 'application/json',
})
);
scriptElem.textContent = JSON.stringify(config);
analyticsElem.appendChild(scriptElem);
analyticsElem.CONFIG = config;
// Force load analytics extension if script not included in page.
if (loadAnalytics) {
// Get Extensions service and force load analytics extension.
const extensions = Services.extensionsFor(
toWin(parentElement.ownerDocument.defaultView)
);
const ampdoc = Services.ampdoc(parentElement);
extensions./*OK*/ installExtensionForDoc(ampdoc, 'amp-analytics');
} else {
Services.analyticsForDocOrNull(parentElement).then((analytics) => {
devAssert(analytics);
});
}
parentElement.appendChild(analyticsElem);
return analyticsElem;
}
/**
 * A class that handles customEvent reporting of an extension element through
 * amp-analytics. This class is not exposed to extension elements directly, to
 * restrict the generation of the config. Please use CustomEventReporterBuilder to
* build a CustomEventReporter instance.
*/
class CustomEventReporter {
/**
* @param {!Element} parent
* @param {!JsonObject} config
*/
constructor(parent, config) {
devAssert(config['triggers'], 'Config must have triggers defined');
/** @private {string} */
this.id_ = parent.getResourceId();
/** @private {!AmpElement} */
this.parent_ = parent;
/** @private {JsonObject} */
this.config_ = config;
for (const event in config['triggers']) {
const eventType = config['triggers'][event]['on'];
devAssert(
eventType,
'CustomEventReporter config must specify trigger eventType'
);
const newEventType = this.getEventTypeInSandbox_(eventType);
config['triggers'][event]['on'] = newEventType;
}
this.parent_
.signals()
.whenSignal(CommonSignals.LOAD_START)
.then(() => {
insertAnalyticsElement(this.parent_, config, true);
});
}
/**
* @param {string} eventType
* @param {!JsonObject=} opt_vars A map of vars and their values.
*/
trigger(eventType, opt_vars) {
devAssert(
this.config_['triggers'][eventType],
'Cannot trigger non initiated eventType'
);
triggerAnalyticsEvent(
this.parent_,
this.getEventTypeInSandbox_(eventType),
opt_vars,
/** enableDataVars */ false
);
}
/**
* @param {string} eventType
* @return {string}
*/
getEventTypeInSandbox_(eventType) {
return `sandbox-${this.id_}-${eventType}`;
}
}
/**
 * A builder class that enables extension elements to easily build and get a
* CustomEventReporter instance. Its constructor requires the parent AMP
* element. It provides two methods #track() and #build() to build the
* CustomEventReporter instance.
*/
export class CustomEventReporterBuilder {
/** @param {!AmpElement} parent */
constructor(parent) {
/** @private {!AmpElement} */
this.parent_ = parent;
/** @private {?JsonObject} */
this.config_ = /** @type {JsonObject} */ ({
'requests': {},
'triggers': {},
});
}
/**
* @param {!JsonObject} transportConfig
*/
setTransportConfig(transportConfig) {
this.config_['transport'] = transportConfig;
}
/**
* @param {!JsonObject} extraUrlParamsConfig
*/
setExtraUrlParams(extraUrlParamsConfig) {
this.config_['extraUrlParams'] = extraUrlParamsConfig;
}
/**
* The #track() method takes in a unique custom-event name, and the
* corresponding request url (or an array of request urls). One can call
 * #track() multiple times with different eventType names (order doesn't
* matter) before #build() is called.
* @param {string} eventType
* @param {string|!Array<string>} request
* @return {!CustomEventReporterBuilder}
*/
track(eventType, request) {
request = isArray(request) ? request : [request];
devAssert(
!this.config_['triggers'][eventType],
'customEventReporterBuilder should not track same eventType twice'
);
const requestList = [];
for (let i = 0; i < request.length; i++) {
const requestName = `${eventType}-request-${i}`;
this.config_['requests'][requestName] = request[i];
requestList.push(requestName);
}
this.config_['triggers'][eventType] = {
'on': eventType,
'request': requestList,
};
return this;
}
/**
* Call the #build() method to build and get the CustomEventReporter instance.
* One CustomEventReporterBuilder instance can only build one reporter, which
 * means #build() should only be called once after all eventTypes are added.
* @return {!CustomEventReporter}
*/
build() {
devAssert(this.config_, 'CustomEventReporter already built');
const report = new CustomEventReporter(
this.parent_,
/** @type {!JsonObject} */ (this.config_)
);
this.config_ = null;
return report;
}
}
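/*
 * A minimal usage sketch (the event name and request URL below are
 * illustrative, not part of this file): an extension element could do
 *
 *   const reporter = new CustomEventReporterBuilder(element)
 *     .track('my-event', 'https://example.com/ping')
 *     .build();
 *   reporter.trigger('my-event');
 *
 * #track() stores the request under the 'my-event' trigger, and #build()
 * returns the CustomEventReporter that inserts the sandboxed amp-analytics
 * element once the parent element signals LOAD_START.
 */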
/**
* A helper method that should be used by all extension elements to add their
 * sandbox analytics tracking. This method takes care of inserting and removing
 * the analytics tracker at the right time in the element lifecycle.
* @param {!AmpElement} element
* @param {!Promise<!JsonObject>} promise
*/
export function useAnalyticsInSandbox(element, promise) {
let analyticsElement = null;
let configPromise = promise;
// Listener to LOAD_START signal. Insert analytics element on LOAD_START
element
.signals()
.whenSignal(CommonSignals.LOAD_START)
.then(() => {
if (analyticsElement || !configPromise) {
return;
}
configPromise.then((config) => {
if (!configPromise) {
// If config promise resolve after unload, do nothing.
return;
}
configPromise = null;
analyticsElement = insertAnalyticsElement(element, config, false);
});
});
  // Listener to UNLOAD signal. Remove the analytics element on UNLOAD.
element
.signals()
.whenSignal(CommonSignals.UNLOAD)
.then(() => {
configPromise = null;
if (analyticsElement) {
removeElement(analyticsElement);
analyticsElement = null;
}
});
}
|
{
"pile_set_name": "Github"
}
|
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
/*
* Test that originalDefaultEngine property is set and switches correctly.
*/
"use strict";
add_task(async function setup() {
Region._setHomeRegion("an", false);
await AddonTestUtils.promiseStartupManager();
await SearchTestUtils.useTestEngines("test-extensions");
});
function promiseDefaultNotification() {
return SearchTestUtils.promiseSearchNotification(
SearchUtils.MODIFIED_TYPE.DEFAULT,
SearchUtils.TOPIC_ENGINE_MODIFIED
);
}
add_task(async function test_originalDefaultEngine() {
await Promise.all([Services.search.init(), promiseAfterSettings()]);
Assert.equal(
Services.search.originalDefaultEngine.name,
"Multilocale AN",
"Should have returned the correct original default engine"
);
});
add_task(async function test_changeRegion() {
// Now change the region, and check we get the correct default according to
// the config file.
// Note: the test could be done with changing regions or locales. The important
// part is that the default engine is changing across the switch, and that
// the engine is not the first one in the new sorted engines list.
await promiseSetHomeRegion("tr");
Assert.equal(
Services.search.originalDefaultEngine.name,
// Very important this default is not the first one in the list (which is
// the next fallback if the config one can't be found).
"Special",
"Should have returned the correct engine for the new locale"
);
});
|
{
"pile_set_name": "Github"
}
|
; This test checks that proper directives to switch between ARM and Thumb mode
; are added when linking ARM and Thumb modules.
; RUN: llvm-as %s -o %t1.bc
; RUN: llvm-as %p/Inputs/thumb-module-inline-asm.ll -o %t2.bc
; RUN: llvm-link %t1.bc %t2.bc -S 2> %t3.out | FileCheck %s
target triple = "armv7-linux-gnueabihf"
module asm "add r1, r2, r2"
; CHECK: .text
; CHECK-NEXT: .balign 4
; CHECK-NEXT: .arm
; CHECK-NEXT: add r1, r2, r2
; CHECK-NEXT: module asm
; CHECK-NEXT: .text
; CHECK-NEXT: .balign 2
; CHECK-NEXT: .thumb
; CHECK-NEXT: orn r1, r2, r2
|
{
"pile_set_name": "Github"
}
|
<template style="display: block">
<slot></slot>
</template>
|
{
"pile_set_name": "Github"
}
|
#!/usr/bin/perl -w
# (c) 2009, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
# Display r/w activity for all processes
# The common_* event handler fields are the most useful fields common to
# all events. They don't necessarily correspond to the 'common_*' fields
# in the status files. Those fields not available as handler params can
# be retrieved via script functions of the form get_common_*().
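# A typical way to drive this script (command lines are illustrative and may
# differ between perf versions): record the read/write syscall tracepoints,
# e.g.
#   perf record -e syscalls:sys_enter_read -e syscalls:sys_exit_read \
#               -e syscalls:sys_enter_write -e syscalls:sys_exit_write -a
# and then generate the per-pid report with
#   perf script -s rw-by-pid.pl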
use 5.010000;
use strict;
use warnings;
use lib "$ENV{'PERF_EXEC_PATH'}/scripts/perl/Perf-Trace-Util/lib";
use lib "./Perf-Trace-Util/lib";
use Perf::Trace::Core;
use Perf::Trace::Util;
my %reads;
my %writes;
sub syscalls::sys_exit_read
{
my ($event_name, $context, $common_cpu, $common_secs, $common_nsecs,
$common_pid, $common_comm,
$nr, $ret) = @_;
if ($ret > 0) {
$reads{$common_pid}{bytes_read} += $ret;
} else {
if (!defined ($reads{$common_pid}{bytes_read})) {
$reads{$common_pid}{bytes_read} = 0;
}
$reads{$common_pid}{errors}{$ret}++;
}
}
sub syscalls::sys_enter_read
{
my ($event_name, $context, $common_cpu, $common_secs, $common_nsecs,
$common_pid, $common_comm,
$nr, $fd, $buf, $count) = @_;
$reads{$common_pid}{bytes_requested} += $count;
$reads{$common_pid}{total_reads}++;
$reads{$common_pid}{comm} = $common_comm;
}
sub syscalls::sys_exit_write
{
my ($event_name, $context, $common_cpu, $common_secs, $common_nsecs,
$common_pid, $common_comm,
$nr, $ret) = @_;
if ($ret <= 0) {
$writes{$common_pid}{errors}{$ret}++;
}
}
sub syscalls::sys_enter_write
{
my ($event_name, $context, $common_cpu, $common_secs, $common_nsecs,
$common_pid, $common_comm,
$nr, $fd, $buf, $count) = @_;
$writes{$common_pid}{bytes_written} += $count;
$writes{$common_pid}{total_writes}++;
$writes{$common_pid}{comm} = $common_comm;
}
sub trace_end
{
printf("read counts by pid:\n\n");
printf("%6s %20s %10s %10s %10s\n", "pid", "comm",
"# reads", "bytes_requested", "bytes_read");
printf("%6s %-20s %10s %10s %10s\n", "------", "--------------------",
"-----------", "----------", "----------");
foreach my $pid (sort { ($reads{$b}{bytes_read} || 0) <=>
($reads{$a}{bytes_read} || 0) } keys %reads) {
my $comm = $reads{$pid}{comm} || "";
my $total_reads = $reads{$pid}{total_reads} || 0;
my $bytes_requested = $reads{$pid}{bytes_requested} || 0;
my $bytes_read = $reads{$pid}{bytes_read} || 0;
printf("%6s %-20s %10s %10s %10s\n", $pid, $comm,
$total_reads, $bytes_requested, $bytes_read);
}
printf("\nfailed reads by pid:\n\n");
printf("%6s %20s %6s %10s\n", "pid", "comm", "error #", "# errors");
printf("%6s %20s %6s %10s\n", "------", "--------------------",
"------", "----------");
my @errcounts = ();
foreach my $pid (keys %reads) {
foreach my $error (keys %{$reads{$pid}{errors}}) {
my $comm = $reads{$pid}{comm} || "";
my $errcount = $reads{$pid}{errors}{$error} || 0;
push @errcounts, [$pid, $comm, $error, $errcount];
}
}
@errcounts = sort { $b->[3] <=> $a->[3] } @errcounts;
for my $i (0 .. $#errcounts) {
printf("%6d %-20s %6d %10s\n", $errcounts[$i][0],
$errcounts[$i][1], $errcounts[$i][2], $errcounts[$i][3]);
}
printf("\nwrite counts by pid:\n\n");
printf("%6s %20s %10s %10s\n", "pid", "comm",
"# writes", "bytes_written");
printf("%6s %-20s %10s %10s\n", "------", "--------------------",
"-----------", "----------");
foreach my $pid (sort { ($writes{$b}{bytes_written} || 0) <=>
($writes{$a}{bytes_written} || 0)} keys %writes) {
my $comm = $writes{$pid}{comm} || "";
my $total_writes = $writes{$pid}{total_writes} || 0;
my $bytes_written = $writes{$pid}{bytes_written} || 0;
printf("%6s %-20s %10s %10s\n", $pid, $comm,
$total_writes, $bytes_written);
}
printf("\nfailed writes by pid:\n\n");
printf("%6s %20s %6s %10s\n", "pid", "comm", "error #", "# errors");
printf("%6s %20s %6s %10s\n", "------", "--------------------",
"------", "----------");
@errcounts = ();
foreach my $pid (keys %writes) {
foreach my $error (keys %{$writes{$pid}{errors}}) {
my $comm = $writes{$pid}{comm} || "";
my $errcount = $writes{$pid}{errors}{$error} || 0;
push @errcounts, [$pid, $comm, $error, $errcount];
}
}
@errcounts = sort { $b->[3] <=> $a->[3] } @errcounts;
for my $i (0 .. $#errcounts) {
printf("%6d %-20s %6d %10s\n", $errcounts[$i][0],
$errcounts[$i][1], $errcounts[$i][2], $errcounts[$i][3]);
}
print_unhandled();
}
my %unhandled;
sub print_unhandled
{
if ((scalar keys %unhandled) == 0) {
return;
}
print "\nunhandled events:\n\n";
printf("%-40s %10s\n", "event", "count");
printf("%-40s %10s\n", "----------------------------------------",
"-----------");
foreach my $event_name (keys %unhandled) {
printf("%-40s %10d\n", $event_name, $unhandled{$event_name});
}
}
sub trace_unhandled
{
my ($event_name, $context, $common_cpu, $common_secs, $common_nsecs,
$common_pid, $common_comm) = @_;
$unhandled{$event_name}++;
}
|
{
"pile_set_name": "Github"
}
|
// This file is part of libigl, a simple c++ geometry processing library.
//
// Copyright (C) 2013 Alec Jacobson <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla Public License
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
#include "random_dir.h"
#include <igl/PI.h>
#include <cmath>
IGL_INLINE Eigen::Vector3d igl::random_dir()
{
using namespace Eigen;
double z = (double)rand() / (double)RAND_MAX*2.0 - 1.0;
double t = (double)rand() / (double)RAND_MAX*2.0*PI;
// http://www.altdevblogaday.com/2012/05/03/generating-uniformly-distributed-points-on-sphere/
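  // Why this is uniform (a brief note, not from the original source): by the
  // Archimedes hat-box argument, the z-coordinate of a uniformly random point
  // on the unit sphere is itself uniform on [-1, 1], so drawing z uniformly,
  // drawing the azimuth t uniformly on [0, 2*pi), and placing the point on
  // the circle of radius sqrt(1 - z*z) at height z yields a uniform direction.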
double r = sqrt(1.0-z*z);
double x = r * cos(t);
double y = r * sin(t);
return Vector3d(x,y,z);
}
IGL_INLINE Eigen::MatrixXd igl::random_dir_stratified(const int n)
{
using namespace Eigen;
using namespace std;
const double m = std::floor(sqrt(double(n)));
MatrixXd N(n,3);
int row = 0;
for(int i = 0;i<m;i++)
{
const double x = double(i)*1./m;
for(int j = 0;j<m;j++)
{
const double y = double(j)*1./m;
double z = (x+(1./m)*(double)rand() / (double)RAND_MAX)*2.0 - 1.0;
double t = (y+(1./m)*(double)rand() / (double)RAND_MAX)*2.0*PI;
double r = sqrt(1.0-z*z);
N(row,0) = r * cos(t);
N(row,1) = r * sin(t);
N(row,2) = z;
row++;
}
}
// Finish off with uniform random directions
for(;row<n;row++)
{
N.row(row) = random_dir();
}
return N;
}
|
{
"pile_set_name": "Github"
}
|
" Vim support file to switch off loading indent files for file types
"
" Maintainer: Bram Moolenaar <[email protected]>
" Last Change: 2001 Jun 11
if exists("did_indent_on")
unlet did_indent_on
endif
" Remove all autocommands in the filetypeindent group
silent! au! filetypeindent *
|
{
"pile_set_name": "Github"
}
|
[chuck](VM): NullPointerException: (array access) on line[3] in shred[id=1:./01-Basic/81.ck]
|
{
"pile_set_name": "Github"
}
|
export { default as useDimensions } from "./useDimensions";
export { useStylesheet, getStylesheet } from "./useStylesheet";
export {
default as MediaQuery,
MediaQuery as IMediaQuery,
mediaQuery
} from "./MediaQuery";
export { default as ResponsiveComponent } from "./ResponsiveComponent";
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC "-//NetBeans//DTD Module Status 1.0//EN"
"http://www.netbeans.org/dtds/module-status-1_0.dtd">
<module name="com.sun.tools.visualvm.sa">
<param name="autoload">false</param>
<param name="eager">false</param>
<param name="enabled">true</param>
<param name="jar">modules/com-sun-tools-visualvm-sa.jar</param>
<param name="reloadable">false</param>
</module>
|
{
"pile_set_name": "Github"
}
|
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/miniconda.R
\name{miniconda_path}
\alias{miniconda_path}
\title{Path to Miniconda}
\usage{
miniconda_path()
}
\description{
The path to the Miniconda installation to use. By default, an OS-specific
path is used. If you'd like to instead set your own path, you can set the
\code{RETICULATE_MINICONDA_PATH} environment variable.
}
\seealso{
Other miniconda:
\code{\link{install_miniconda}()},
\code{\link{miniconda_update}()}
}
\concept{miniconda}
|
{
"pile_set_name": "Github"
}
|
/* Copyright (C) 1991-2020 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with the GNU C Library; if not, see
<https://www.gnu.org/licenses/>. */
#if !_LIBC
# include <libc-config.h>
# include "tempname.h"
#endif
#include <sys/types.h>
#include <assert.h>
#include <errno.h>
#include <stdio.h>
#ifndef P_tmpdir
# define P_tmpdir "/tmp"
#endif
#ifndef TMP_MAX
# define TMP_MAX 238328
#endif
#ifndef __GT_FILE
# define __GT_FILE 0
# define __GT_DIR 1
# define __GT_NOCREATE 2
#endif
#if !_LIBC && (GT_FILE != __GT_FILE || GT_DIR != __GT_DIR \
|| GT_NOCREATE != __GT_NOCREATE)
# error report this to [email protected]
#endif
#include <stddef.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <stdint.h>
#include <sys/random.h>
#include <sys/stat.h>
#if _LIBC
# define struct_stat64 struct stat64
# define __secure_getenv __libc_secure_getenv
#else
# define struct_stat64 struct stat
# define __gen_tempname gen_tempname
# define __mkdir mkdir
# define __open open
# define __lxstat64(version, file, buf) lstat (file, buf)
#endif
#ifdef _LIBC
# include <random-bits.h>
# define RANDOM_BITS(Var) ((Var) = random_bits ())
typedef uint32_t random_value;
# define RANDOM_VALUE_MAX UINT32_MAX
# define BASE_62_DIGITS 5 /* 62**5 < UINT32_MAX */
# define BASE_62_POWER (62 * 62 * 62 * 62 * 62) /* 62**BASE_62_DIGITS */
#else
/* Use getrandom if it works, falling back on a 64-bit linear
congruential generator that starts with whatever Var's value
happens to be. */
# define RANDOM_BITS(Var) \
((void) (getrandom (&(Var), sizeof (Var), 0) == sizeof (Var) \
|| ((Var) = 2862933555777941757 * (Var) + 3037000493)))
typedef uint_fast64_t random_value;
# define RANDOM_VALUE_MAX UINT_FAST64_MAX
# define BASE_62_DIGITS 10 /* 62**10 < UINT_FAST64_MAX */
# define BASE_62_POWER (62LL * 62 * 62 * 62 * 62 * 62 * 62 * 62 * 62 * 62)
#endif
#if _LIBC
/* Return nonzero if DIR is an existent directory. */
static int
direxists (const char *dir)
{
struct_stat64 buf;
return __xstat64 (_STAT_VER, dir, &buf) == 0 && S_ISDIR (buf.st_mode);
}
/* Path search algorithm, for tmpnam, tmpfile, etc. If DIR is
non-null and exists, uses it; otherwise uses the first of $TMPDIR,
P_tmpdir, /tmp that exists. Copies into TMPL a template suitable
for use with mk[s]temp. Will fail (-1) if DIR is non-null and
doesn't exist, none of the searched dirs exists, or there's not
enough space in TMPL. */
int
__path_search (char *tmpl, size_t tmpl_len, const char *dir, const char *pfx,
int try_tmpdir)
{
const char *d;
size_t dlen, plen;
if (!pfx || !pfx[0])
{
pfx = "file";
plen = 4;
}
else
{
plen = strlen (pfx);
if (plen > 5)
plen = 5;
}
if (try_tmpdir)
{
d = __secure_getenv ("TMPDIR");
if (d != NULL && direxists (d))
dir = d;
else if (dir != NULL && direxists (dir))
/* nothing */ ;
else
dir = NULL;
}
if (dir == NULL)
{
if (direxists (P_tmpdir))
dir = P_tmpdir;
else if (strcmp (P_tmpdir, "/tmp") != 0 && direxists ("/tmp"))
dir = "/tmp";
else
{
__set_errno (ENOENT);
return -1;
}
}
dlen = strlen (dir);
while (dlen > 1 && dir[dlen - 1] == '/')
dlen--; /* remove trailing slashes */
/* check we have room for "${dir}/${pfx}XXXXXX\0" */
if (tmpl_len < dlen + 1 + plen + 6 + 1)
{
__set_errno (EINVAL);
return -1;
}
sprintf (tmpl, "%.*s/%.*sXXXXXX", (int) dlen, dir, (int) plen, pfx);
return 0;
}
#endif /* _LIBC */
#if _LIBC
static int try_tempname_len (char *, int, void *, int (*) (char *, void *),
size_t);
#endif
static int
try_file (char *tmpl, void *flags)
{
int *openflags = flags;
return __open (tmpl,
(*openflags & ~O_ACCMODE)
| O_RDWR | O_CREAT | O_EXCL, S_IRUSR | S_IWUSR);
}
static int
try_dir (char *tmpl, void *flags _GL_UNUSED)
{
return __mkdir (tmpl, S_IRUSR | S_IWUSR | S_IXUSR);
}
static int
try_nocreate (char *tmpl, void *flags _GL_UNUSED)
{
struct_stat64 st;
if (__lxstat64 (_STAT_VER, tmpl, &st) == 0 || errno == EOVERFLOW)
__set_errno (EEXIST);
return errno == ENOENT ? 0 : -1;
}
/* These are the characters used in temporary file names. */
static const char letters[] =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
/* Generate a temporary file name based on TMPL. TMPL must match the
rules for mk[s]temp (i.e., end in at least X_SUFFIX_LEN "X"s,
possibly with a suffix).
The name constructed does not exist at the time of the call to
this function. TMPL is overwritten with the result.
KIND may be one of:
__GT_NOCREATE: simply verify that the name does not exist
at the time of the call.
__GT_FILE: create the file using open(O_CREAT|O_EXCL)
and return a read-write fd. The file is mode 0600.
__GT_DIR: create a directory, which will be mode 0700.
We use a clever algorithm to get hard-to-predict names. */
#ifdef _LIBC
static
#endif
int
gen_tempname_len (char *tmpl, int suffixlen, int flags, int kind,
size_t x_suffix_len)
{
static int (*const tryfunc[]) (char *, void *) =
{
[__GT_FILE] = try_file,
[__GT_DIR] = try_dir,
[__GT_NOCREATE] = try_nocreate
};
return try_tempname_len (tmpl, suffixlen, &flags, tryfunc[kind],
x_suffix_len);
}
#ifdef _LIBC
static
#endif
int
try_tempname_len (char *tmpl, int suffixlen, void *args,
int (*tryfunc) (char *, void *), size_t x_suffix_len)
{
size_t len;
char *XXXXXX;
unsigned int count;
int fd = -1;
int save_errno = errno;
/* A lower bound on the number of temporary files to attempt to
generate. The maximum total number of temporary file names that
can exist for a given template is 62**6. It should never be
necessary to try all of these combinations. Instead if a reasonable
     number of names is tried (we define reasonable as 62**3) we fail, to
     give the system administrator the chance to remove the problem.
This value requires that X_SUFFIX_LEN be at least 3. */
#define ATTEMPTS_MIN (62 * 62 * 62)
/* The number of times to attempt to generate a temporary file. To
conform to POSIX, this must be no smaller than TMP_MAX. */
#if ATTEMPTS_MIN < TMP_MAX
unsigned int attempts = TMP_MAX;
#else
unsigned int attempts = ATTEMPTS_MIN;
#endif
/* A random variable. */
random_value v;
/* How many random base-62 digits can currently be extracted from V. */
int vdigits = 0;
/* Least unfair value for V. If V is less than this, V can generate
BASE_62_DIGITS digits fairly. Otherwise it might be biased. */
random_value const unfair_min
= RANDOM_VALUE_MAX - RANDOM_VALUE_MAX % BASE_62_POWER;
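  /* Note (added explanation): digits are extracted from V by rejection
     sampling.  Accepted values of V lie in [0, unfair_min), and unfair_min is
     RANDOM_VALUE_MAX rounded down to a multiple of 62**BASE_62_DIGITS, so
     each of the BASE_62_DIGITS base-62 digits taken from V below is
     unbiased.  */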
len = strlen (tmpl);
if (len < x_suffix_len + suffixlen
|| strspn (&tmpl[len - x_suffix_len - suffixlen], "X") < x_suffix_len)
{
__set_errno (EINVAL);
return -1;
}
/* This is where the Xs start. */
XXXXXX = &tmpl[len - x_suffix_len - suffixlen];
for (count = 0; count < attempts; ++count)
{
for (size_t i = 0; i < x_suffix_len; i++)
{
if (vdigits == 0)
{
do
RANDOM_BITS (v);
while (unfair_min <= v);
vdigits = BASE_62_DIGITS;
}
XXXXXX[i] = letters[v % 62];
v /= 62;
vdigits--;
}
fd = tryfunc (tmpl, args);
if (fd >= 0)
{
__set_errno (save_errno);
return fd;
}
else if (errno != EEXIST)
return -1;
}
/* We got out of the loop because we ran out of combinations to try. */
__set_errno (EEXIST);
return -1;
}
int
__gen_tempname (char *tmpl, int suffixlen, int flags, int kind)
{
return gen_tempname_len (tmpl, suffixlen, flags, kind, 6);
}
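/* A minimal usage sketch (the path below is illustrative): a caller such as
   mkstemp passes a writable template whose last six characters are 'X', e.g.

       char tmpl[] = "/tmp/exampleXXXXXX";
       int fd = __gen_tempname (tmpl, 0, 0, __GT_FILE);

   On success the X's have been replaced with random letters/digits and FD is
   an open read-write descriptor for the newly created 0600-mode file.  */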
#if !_LIBC
int
try_tempname (char *tmpl, int suffixlen, void *args,
int (*tryfunc) (char *, void *))
{
return try_tempname_len (tmpl, suffixlen, args, tryfunc, 6);
}
#endif
|
{
"pile_set_name": "Github"
}
|
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<html>
<head>
<title>{{$title}}</title>
<meta name="generator" content="editplus">
<meta name="author" content="nuttycoder">
<link href="{{$template_root}}/all_purpose_style.css" rel="stylesheet" type="text/css" />
<script src="./template/admin/cssjs/global.functions.js"></script>
<script type="text/javascript" src="{{$template_root}}/cssjs/jquery-1.10.2.min.js"></script>
<script type="text/javascript" src="{{$template_root}}/cssjs/_ajaxdtree.js"></script>
<link href="{{$template_root}}/cssjs/dtree.css" rel="stylesheet" type="text/css" />
<script src="./template/admin/cssjs/jscal2.js"></script>
<script src="./template/admin/cssjs/cn.js"></script>
<link type="text/css" rel="stylesheet" href="./template/admin/cssjs/jscal2.css" />
<link type="text/css" rel="stylesheet" href="./template/admin/cssjs/border-radius.css" />
<script language="javascript">
function check_add_user(){
return(true);
}
{{if $_config.LDAP}}
var foundparent = false;
var servergroup = new Array();
var usergroup = new Array();
{{/if}}
{{*
var AllMember = new Array();
i=0;
{{section name=kk loop=$users}}
AllMember[{{$smarty.section.kk.index}}] = new Array();
AllMember[{{$smarty.section.kk.index}}]['username']='{{$users[kk].username}}';
AllMember[{{$smarty.section.kk.index}}]['realname']='{{$users[kk].realname}}';
AllMember[{{$smarty.section.kk.index}}]['uid']='{{$users[kk].uid}}';
AllMember[{{$smarty.section.kk.index}}]['groupid']='{{$users[kk].groupid}}';
AllMember[{{$smarty.section.kk.index}}]['check']='{{$users[kk].check}}';
{{/section}}
*}}
function enablepri(c, item){
c=!c;//alert(item);
switch(item){
case 'usergroup':
document.getElementById('groupiddh').disabled=c;
break;
case 'start_time':
document.getElementById('start_time').disabled=c;
document.getElementById('f_rangeStart_trigger').disabled=c;
break;
case 'limit_time':
document.getElementById('limit_time').disabled=c;
document.getElementById('f_rangeEnd_trigger').disabled=c;
document.getElementById('nolimit').disabled=c;
break;
case 'ipv4':
document.getElementById('sourceip').disabled=c;
break;
case 'ipv6':
document.getElementById('sourceipv6').disabled=c;
break;
case 'enable':
document.getElementById('enable').disabled=c;
break;
case 'weektime':
document.getElementById('weektime').disabled=c;
break;
case 'restrictweb':
document.getElementById('restrictweb').disabled=c;
break;
case 'apphost':
document.getElementById('apphost').disabled=c;
break;
case 'loginauth':
document.getElementById('localauth').disabled=c;
document.getElementById('radiusauth').disabled=c;
document.getElementById('ldapauth').disabled=c;
document.getElementById('adauth').disabled=c;
document.getElementById('auth').disabled=c;
document.getElementById('authtype').disabled=c;
break;
case 'rdpclipboard':
document.getElementById('rdpclipauth_up').disabled=c;
document.getElementById('rdpclipauth_down').disabled=c;
break;
case 'rdpdiskauth_up':
document.getElementById('rdpdiskauth_up').disabled=c;
break;
case 'rdpdisk':
document.getElementById('rdpdisk').disabled=c;
break;
case 'allowchange':
document.getElementById('allowchange').disabled=c;
break;
case 'rdplocal':
document.getElementById('rdplocal').disabled=c;
break;
case 'passwordsave':
document.getElementById('passwordsave').disabled=c;
break;
case 'default_control':
document.getElementById('default_control').disabled=c;
break;
case 'rdplocalcheck':
document.getElementById('rdplocalcheck').disabled=c;
break;
case 'default_appcontrol':
document.getElementById('default_appcontrol').disabled=c;
break;
case 'firstauth':
document.getElementById('firstauth').disabled=c;
break;
case 'apptoadmingroup':
document.getElementById('apptoadmingroup').disabled=c;
break;
case 'apptodisk':
document.getElementById('apptodisk').disabled=c;
break;
case 'webportal':
document.getElementById('webportal').disabled=c;
document.getElementById('webportaltime').disabled=c;
break;
case 'asyncoutpass':
document.getElementById('asyncoutpass').disabled=c;
break;
case 'tranportauth':
document.getElementById('tranportauth').disabled=c;
break;
case 'serverport':
document.getElementById('sshport').disabled=c;
document.getElementById('rdpport').disabled=c;
break;
}
}
var groupid='{{$servergroup}}';
function filteruser(){
var username = document.getElementById('username').value;
var gid=0;
{{if $_config.LDAP}}
{{if $_config.TREEMODE}}
var obj1=document.getElementById('sgroupid');
gid=obj1.value;
{{else}}
for(var i=1; true; i++){
var obj=document.getElementById('sgroupid'+i);
if(obj!=null&&obj.options.selectedIndex>-1){
gid=obj.options[obj.options.selectedIndex].value;
continue;
}
break;
}
{{/if}}
{{/if}}
prefgroupid=gid;
var url = 'admin.php?controller=admin_member&action=batchpriorityedit&sgroupid='+gid+"&username="+username;
var checks = document.getElementsByTagName('input');
for(var i=0; i<checks.length; i++){
if(checks[i].name=='uid[]'&&checks[i].checked){
url += '&uid[]='+checks[i].value;
}
}
window.location=url;
}
</script>
<style type="text/css">
a {
color: #003499;
text-decoration: none;
}
a:hover {
color: #000000;
text-decoration: underline;
}
</style>
</head>
<SCRIPT language=javascript src="{{$template_root}}/images/selectdate.js"></SCRIPT>
<body>
<table width="100%" border="0" cellspacing="0" cellpadding="0">
<tr><td valign="middle" class="hui_bj"><div class="menu" style="width:1100px;">
<ul>
<li class="me_{{if $smarty.session.RADIUSUSERLIST}}b{{else}}a{{/if}}"><img src="{{$template_root}}/images/an1{{if $smarty.session.RADIUSUSERLIST}}1{{/if}}.jpg" align="absmiddle"/><a href="admin.php?controller=admin_member">用户管理</a><img src="{{$template_root}}/images/an3{{if $smarty.session.RADIUSUSERLIST}}3{{/if}}.jpg" align="absmiddle"/></li>
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_pro&action=dev_index">设备管理</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_pro&action=dev_group">目录管理</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_member&action=workdept">用户属性</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_pro&action=systemtype">系统类型</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_pro&action=sshkey">SSH公私钥</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
<li class="me_{{if $smarty.session.RADIUSUSERLIST}}a{{else}}b{{/if}}"><img src="{{$template_root}}/images/an1{{if !$smarty.session.RADIUSUSERLIST}}1{{/if}}.jpg" align="absmiddle"/><a href="admin.php?controller=admin_member&action=radiususer">RADIUS用户</a><img src="{{$template_root}}/images/an3{{if !$smarty.session.RADIUSUSERLIST}}3{{/if}}.jpg" align="absmiddle"/></li>
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_pro&action=passwordkey">密码密钥</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
{{if $smarty.session.ADMIN_LEVEL eq 1}}
<li class="me_b"><img src="{{$template_root}}/images/an11.jpg" align="absmiddle"/><a href="admin.php?controller=admin_member&action=online">在线用户</a><img src="{{$template_root}}/images/an33.jpg" align="absmiddle"/></li>
{{/if}}
</ul><span class="back_img"><A href="admin.php?controller=admin_member&back=1"><IMG src="{{$template_root}}/images/back1.png"
width="80" height="30" border="0"></A></span>
</div></td></tr>
<tr>
<td class=""><table width="100%" border="0" cellspacing="0" cellpadding="0" >
<tr>
<td align="center"><form name="f1" method=post action="admin.php?controller=admin_member&action=batchpriorityeditsave&chk_member={{$usersid}}" enctype="multipart/form-data" onsubmit="return confirm('确定操作?');">
<table border=0 width=100% cellpadding=5 cellspacing=0 bgcolor="#FFFFFF" valign=top class="BBtable">
<tr><th colspan="3" class="list_bg"></th></tr>
<tr><td colspan="5" align=center><div style="text-align:left;width:500px;">过滤用户:<input type="text" name="username" id="username" >
{{assign var=select_group_id value='sgroupid'}}
{{include file="select_sgroup_ajax.tpl" }}
<input type="button" onclick="filteruser();" value="提交" ></div></td></tr>
{{assign var="trnumber" value=$trnumber+1}}
<tr {{if $trnumber % 2 == 0}}bgcolor="f7f7f7"{{/if}} id="loginmodetr">
<td width="3%" align="center">
权限
</td>
<td width="97%">
<table width="100%">
<TR bgcolor="#f7f7f7">
<TD align="left"><input type="checkbox" name="enable[]" value="usergroup" onclick="enablepri(this.checked,'usergroup');" > 运维组: </TD>
<TD >
{{assign var=select_ladp_id value='ldapid'}}
{{assign var=select_group_id value='groupid'}}
{{include file="select_sgroup_ajax.tpl" }} </TD>
<td><input type="checkbox" name="enable[]" value="asyncoutpass" onclick="enablepri(this.checked,'asyncoutpass');" > 同步外部密码:</td><td><select class="wbk" id=asyncoutpass name=asyncoutpass>
<OPTION value="-1" {{if -1 eq $member.asyncoutpass}}selected{{/if}}>关闭</OPTION>
{{section name=asyn loop=11}}
<OPTION value="{{$smarty.section.asyn.index}}" {{if $smarty.section.asyn.index eq $member.asyncoutpass}}selected{{/if}}>{{$smarty.section.asyn.index}}</OPTION>
{{/section}}
</SELECT></td>
</TR>
<TR bgcolor="">
<TD align="left"><input type="checkbox" name="enable[]" value="start_time" onclick="enablepri(this.checked,'start_time');" > 生效时间: </TD>
<TD><INPUT value="{{$member.start_time}}" id="start_time" name="start_time" >
<input type="button" id="f_rangeStart_trigger" name="f_rangeStart_trigger" value="选择时间" class="wbk">
</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="limit_time" onclick="enablepri(this.checked,'limit_time');" > 过期时间:</TD>
<TD><INPUT value="{{if $member.end_time ne '2037-01-01 00:00:00'}}{{$member.end_time}}{{/if}}" id="limit_time" name="limit_time" onFocus="setday(this)"> <input type="button" id="f_rangeEnd_trigger" name="f_rangeEnd_trigger" value="选择时间" class="wbk">
<script type="text/javascript">
var cal = Calendar.setup({
onSelect: function(cal) { cal.hide() },
showTime: true,
popupDirection:'down'
});
var cal2 = Calendar.setup({
onSelect: function(cal2) { cal2.hide() },
showTime: true,
popupDirection:'down'
});
cal.manageFields("f_rangeStart_trigger", "start_time", "%Y-%m-%d %H:%M:%S");
cal2.manageFields("f_rangeEnd_trigger", "limit_time", "%Y-%m-%d %H:%M:%S");
</script>
{{$language.AlwaysValid}}<INPUT value="1" {{if $member.end_time eq '2037-01-01 00:00:00' or !$member.end_time}} checked {{/if}} onclick="document.getElementById('limit_time').value=''" type=checkbox name="nolimit" id="nolimit">
</TD>
</TR>
<TR bgcolor="#f7f7f7">
<TD align="left"><input type="checkbox" name="enable[]" value="ipv4" onclick="enablepri(this.checked,'ipv4');" > 来源IPv4:</TD>
<TD><select class="wbk" name=sourceip id=sourceip>
<OPTION value="">{{$language.no}}</OPTION>
{{section name=t loop=$sourceip}}
{{if !$sourceip[t].ipv6}}
<option value="{{$sourceip[t].groupname}}" {{if $sourceip[t].groupname == $member.sourceip}}selected{{/if}}>{{$sourceip[t].groupname}}</option>
{{/if}}
{{/section}}
</SELECT>
</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="ipv6" onclick="enablepri(this.checked,'ipv6');" > 来源IPv6:</TD>
<TD><select class="wbk" name=sourceipv6 id=sourceipv6>
<OPTION value="">{{$language.no}}</OPTION>
{{section name=t loop=$sourceip}}
{{if $sourceip[t].ipv6}}
<option value="{{$sourceip[t].groupname}}" {{if $sourceip[t].groupname == $member.sourceipv6}}selected{{/if}}>{{$sourceip[t].groupname}}</option>
{{/if}}
{{/section}}
</SELECT></TD>
</TR>
<TR bgcolor="">
<TD align="left"><input type="checkbox" name="enable[]" value="weektime" onclick="enablepri(this.checked,'weektime');" > 周组策略:</TD>
<TD><select class="wbk" id=weektime name=weektime>
<OPTION value="">{{$language.no}}</OPTION>
{{section name=k loop=$weektime}}
<option value="{{$weektime[k].policyname}}" {{if $weektime[k].policyname == $member.weektime}}selected{{/if}}>{{$weektime[k].policyname}}</option>
{{/section}}
</SELECT> </TD>
<TD align="left"><input type="checkbox" name="enable[]" value="enable" onclick="enablepri(this.checked,'enable');" > 启用:</TD>
<TD><input type="checkbox" name="_enable" id="enable" value="1" {{if $member.enable || !$member.uid }}checked{{/if}}></TD>
</TR>
<TR bgcolor="#f7f7f7">
<TD align="left"><input type="checkbox" name="enable[]" value="apphost" onclick="enablepri(this.checked,'apphost');" > 显示应用发布IP:</TD>
<TD><input type="checkbox" id="apphost" name="apphost" value="1" {{if $member.apphost}}checked{{/if}}></TD>
<TD align="left"><input type="checkbox" name="enable[]" value="loginauth" onclick="enablepri(this.checked,'loginauth');" > 认证方式</TD>
<TD><input type="checkbox" name="localauth" id="localauth" class="" value="1" {{if $member.localauth}}checked{{/if}}>认证 <input type="checkbox" id="radiusauth" name="radiusauth" class="" value="1" {{if $member.radiusauth}}checked{{/if}}>RADIUS <input type="checkbox" name="ldapauth" id="ldapauth" class="" value="1" {{if $member.ldapauth}}checked{{/if}}>LDAP <input type="checkbox" name="adauth" id="adauth" class="" value="1" {{if $member.adauth}}checked{{/if}}>AD <input type="checkbox" name="auth" id="auth" class="" value="2" {{if $member.auth eq 2}}checked{{/if}}>短信 <select name="authtype" id="authtype" ><option value="0" {{if !$member.authtype}}selected{{/if}}>单一认证</option><option value="1" {{if $member.authtype}}selected{{/if}}>组合认证</option></select></TD>
</TR>
<TR>
<TD align="left" bordercolor="white"><input type="checkbox" name="enable[]" value="rdpclipboard" onclick="enablepri(this.checked,'rdpclipboard');" > RDP剪贴版</TD>
<TD bordercolor="white">上行:<input type="checkbox" name="rdpclipauth_up" id="rdpclipauth_up" class="" value="1" {{if $member.rdpclipauth_up or !$member.uid}}checked{{/if}}> 下行:<input type="checkbox" name="rdpclipauth_down" id="rdpclipauth_down" class="" value="1" {{if $member.rdpclipauth_down or !$member.uid}}checked{{/if}}>
</TD>
<TD align="left" bordercolor="white"><input type="checkbox" name="enable[]" value="rdpdiskauth_up" onclick="enablepri(this.checked,'rdpdiskauth_up');" > RDP磁盘:</TD>
<TD bordercolor="white"><input type="checkbox" name="rdpdiskauth_up" id="rdpdiskauth_up" class="" value="1" {{if $member.rdpdiskauth_up or !$member.uid}}checked{{/if}}>
</TD>
</TR>
<TR bgcolor="#f7f7f7">
<TD align="left"><input type="checkbox" name="enable[]" value="rdpdisk" onclick="enablepri(this.checked,'rdpdisk');" > RDP磁盘映射:</TD>
<TD><input type="text" name="rdpdisk" id="rdpdisk" class="input_shorttext" value="{{if !$member.uid}}*{{else}}{{$member.rdpdisk}}{{/if}}">例子C:;D:;E:;</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="allowchange" onclick="enablepri(this.checked,'allowchange');" > 允许改密:</TD>
<TD><input type="checkbox" id="allowchange" name="allowchange" value="1" {{if $member.allowchange}}checked{{/if}}> </TD>
</TR>
<TR>
<TD align="left"><input type="checkbox" name="enable[]" value="rdplocal" onclick="enablepri(this.checked,'rdplocal');" > rdp本地:</TD>
<TD><input type="checkbox" name="rdplocal" id="rdplocal" value="1" {{if $member.rdplocal }}checked{{/if}}></TD>
<TD align="left"><input type="checkbox" name="enable[]" value="passwordsave" onclick="enablepri(this.checked,'passwordsave');" > 系统用户名缓存:</TD>
<TD><input type="checkbox" name="passwordsave" id="passwordsave" value="1" {{if $member.passwordsave}}checked{{/if}}></TD>
</TR>
<TR bgcolor="#f7f7f7">
<TD align="left" bordercolor="white"><input type="checkbox" name="enable[]" value="default_control" onclick="enablepri(this.checked,'default_control');" > 默认控件: </TD>
<TD bordercolor="white"><select class="wbk" name=default_control id=default_control>
<OPTION value="0" {{if $member.default_control eq 0}}selected{{/if}}>自动检测</OPTION>
<OPTION value="1" {{if $member.default_control eq 1}}selected{{/if}}>applet</OPTION>
<OPTION value="2" {{if $member.default_control eq 2}}selected{{/if}}>activeX</OPTION>
</SELECT>
</TD>
<TD align="left" bordercolor="white"> <input type="checkbox" name="enable[]" value="rdplocalcheck" onclick="enablepri(this.checked,'rdplocalcheck');" > 默认RDP本地</TD>
<TD bordercolor="white"><input type="checkbox" id="rdplocalcheck" name="rdplocalcheck" value="1" {{if $member.rdplocalcheck}}checked{{/if}}>
</TD>
</TR>
<TR>
<TD align="left"><input type="checkbox" name="enable[]" value="default_appcontrol" onclick="enablepri(this.checked,'default_appcontrol');" > 应用发布默认控件:</TD>
<TD><select class="wbk" name=default_appcontrol id="default_appcontrol">
<OPTION value="0" {{if $member.default_appcontrol eq 0}}selected{{/if}}>WEB</OPTION>
<OPTION value="1" {{if $member.default_appcontrol eq 1}}selected{{/if}}>RDP</OPTION>
</SELECT>
</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="restrictweb" onclick="enablepri(this.checked,'restrictweb');" > 限制工具登录:</TD>
<TD><input type="checkbox" name="restrictweb" id="restrictweb" value="1" {{if $member.restrictweb}}checked{{/if}}>
</TD>
</TR>
<TR bgcolor="#f7f7f7">
<TD align="left"><input type="checkbox" name="enable[]" value="firstauth" onclick="enablepri(this.checked,'firstauth');" > 优先登录方式:</TD>
<TD>
<select class="wbk" name=firstauth id=firstauth >
<OPTION value="localauth" {{if $member.firstauth eq 'localauth'}}selected{{/if}}>本地登录</OPTION>
<OPTION value="radiusauth" {{if $member.firstauth eq 'radiusauth'}}selected{{/if}}>RADIUS</OPTION>
{{section name=l loop=$ldaps}}
<option value='ldapauth_{{$ldaps[l].address}}' {{if $member.firstauth eq 'ldapauth_'|cat:$ldaps[l].address}}selected{{/if}}>LDAP {{$ldaps[l].domain}}</option>
{{/section}}
{{section name=a loop=$ads}}
<option value='adauth_{{$ads[a].address}}' {{if $member.firstauth eq 'adauth_'|cat:$ads[a].address}}selected{{/if}}>AD {{$ads[a].domain}}</option>
{{/section}}
</SELECT>
</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="apptoadmingroup" onclick="enablepri(this.checked,'apptoadmingroup');" > 应用发布用户为管理员:</TD>
<TD><input type="checkbox" id="apptoadmingroup" name="apptoadmingroup" value="1" {{if $member.apptoadmingroup}}checked{{/if}}>
</TD>
</TR>
<TR bgcolor="">
<TD align="left"><input type="checkbox" name="enable[]" value="apptodisk" onclick="enablepri(this.checked,'apptodisk');" > 应用发布进入桌面:</TD>
<TD><input type="checkbox" id="apptodisk" name="apptodisk" value="1" {{if $member.apptodisk}}checked{{/if}}>
</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="webportal" onclick="enablepri(this.checked,'webportal');" > WEBportal认证:</TD>
<TD>启用:<input type="checkbox" id="webportal" name="webportal" value="1" {{if $member.webportal}}checked{{/if}}> Webportal超时时间:<input type="text" id="webportaltime" name="webportaltime" class="input_shorttext" value="0" style="width:100px">分钟
</TD>
</TR>
<TR bgcolor="#f7f7f7">
<TD align="left"><input type="checkbox" name="enable[]" value="tranportauth" onclick="enablepri(this.checked,'tranportauth');" > 透明登录:</TD>
<TD>
<select class="wbk" name=tranportauth id=tranportauth >
<OPTION value="1" {{if $member.tranportauth eq '1'}}selected{{/if}}>本地</OPTION>
<OPTION value="2" {{if !$member.tranportauth or $member.tranportauth eq '2'}}selected{{/if}}>RADIUS</OPTION>
<OPTION value="3" {{if $member.tranportauth eq '3'}}selected{{/if}}>LDAP</OPTION>
</SELECT>
</TD>
<TD align="left"><input type="checkbox" name="enable[]" value="serverport" onclick="enablepri(this.checked,'serverport');" > 服务端口:</TD>
<TD>
SSH端口:<input type="text" name="sshport" id="sshport" style="width:50px;" value="" > RDP端口:<input type="text" name="rdpport" id="rdpport" style="width:50px;" value="">
</TD>
</TR>
</table>
</td>
</tr>
<tr><td colspan="5" align=center>
</td></tr>
{{assign var="trnumber" value=$trnumber+1}}
<tr {{if $trnumber % 2 == 0}}bgcolor="f7f7f7"{{/if}}>
<td width="10%" align="center" valign=top>
{{$language.bind}}{{$language.User}}
<table border=0 width="100%" style="border:0px;">
<tr><td align="right" style="border-bottom:0px;border-top:0px;border-left:0px;border-right:0px;"><input type="button" name='batchselect' class="an_06" value="批量选择" onclick="window.open('admin.php?controller=admin_pro&action=xzuser', 'newwindow','height=650, width=700, top=0, left=0, toolbar=no, menubar=no, scrollbars=no, resizable=yes,location=no, status=no');" ></td></tr>
<tr><td align="right" style="border-bottom:0px;border-top:0px;border-left:0px;border-right:0px;">全选<input type="checkbox" value=2 onclick="checkAll(this.checked);"></td></tr>
</table>
</td>
<td width="90%">
<table><tr >
{{section name=g loop=$users}}
<td width="150"><input type="checkbox" name='uid[]' value='{{$users[g].uid}}' {{$users[g].check}}>{{$users[g].username}}({{if $users[g].realname}}{{$users[g].realname}}{{else}}未设置{{/if}}){{if $users[g].binded}}</font>{{/if}}</td>{{if ($smarty.section.g.index +1) % 6 == 0}}</tr><tr>{{/if}}
{{/section}}
</tr></table>
</td>
</tr>
<tr><td colspan="2" align="center"><input type=submit name="submit" value="批量导出" class="an_02"> <input type=submit name="submit" value="批量删除" class="an_02"> <input type=submit name="submit" value="批量锁定" class="an_02"> <input type=submit name="submit" value="{{$language.Save}}" class="an_02"></td></tr></table>
</form>
</td>
</tr>
<tr><td colspan="2" height="25"></td></tr>
</table>
</body>
<script>
function checkAll(c){
var targets = document.getElementsByTagName('input');
for(var j=0; j<targets.length; j++){
if(targets[j].name.substring(0,3)=='uid'){
targets[j].checked=c;
}
}
}
enablepri(false, 'usergroup');
enablepri(false, 'start_time');
enablepri(false, 'limit_time');
enablepri(false, 'ipv4');
enablepri(false, 'ipv6');
enablepri(false, 'enable');
enablepri(false, 'weektime');
enablepri(false, 'restrictweb');
enablepri(false, 'apphost');
enablepri(false, 'loginauth');
enablepri(false, 'rdpclipboard');
enablepri(false, 'rdpdiskauth_up');
enablepri(false, 'rdpdisk');
enablepri(false, 'allowchange');
enablepri(false, 'rdplocal');
enablepri(false, 'passwordsave');
enablepri(false, 'default_control');
enablepri(false, 'rdplocalcheck');
enablepri(false, 'default_appcontrol');
enablepri(false, 'firstauth');
enablepri(false, 'apptoadmingroup');
enablepri(false, 'apptodisk');
enablepri(false, 'webportal');
enablepri(false, 'asyncoutpass');
enablepri(false, 'tranportauth');
enablepri(false, 'serverport');
{{if $_config.LDAP}}
{{$changelevelstr}}
{{/if}}
</script>
<iframe name="hide" id="hide" height="0" frameborder="0" scrolling="no"></iframe>
</html>
|
{
"pile_set_name": "Github"
}
|
/**
 * Author: 家
 * QQ: 203118908
 * Purpose: CRUD (create, read, update, delete) operations on the Bmob user table
 */
var config = {
appId: '',
restKey: ''
}
var isLog = true
function myLog() {
if (isLog) {
log.apply(this, Array.prototype.slice.call(arguments))
}
}
var appId = config.appId
var restKey = config.restKey
// Test section start ==========================================================
var userlist = [{
name: 'name1',
password: 'password1',
data: 'data1'
},
{
name: 'name2',
password: 'password2',
data: 'data2'
},
{
name: 'name3',
password: 'password3',
data: 'data3'
}
]
// Register 3 users
for (var i = 0; i < userlist.length; i++) {
var user = userlist[i]
var username = user.name
var password = user.password
var postdata = user.data
var result = 用户注册并上传数据(username, password, postdata)
log(result)
}
var username = userlist[0].name
var password = userlist[0].password
var postdata = userlist[0].data
log(username, password, postdata)
// Log in as user1
var result = 用户登录(username, password)
log(util.format('用户登录结果=%s', result))
var result = 获取用户的objectIdAndSessionToken(username, password)
log(util.format('获取用户的objectIdAndSessionToken=%s', result))
var objectId = result.objectId
var sessionToken = result.sessionToken
log('当前使用的变量键值对=')
log('username=' + username)
log('password=' + password)
log('postdata=' + postdata)
log('objectId=' + objectId)
log('sessionToken=' + sessionToken)
var result = 查询用户名是否存在(username)
log(util.format('查询用户名是否存在=%s', result))
var result = 获取当前用户信息(objectId)
log(util.format('获取当前用户信息=%s', result))
var result = 改动一个用户已经有的数据(objectId, sessionToken, {
"data": "data1666666666666"
})
log(util.format('改动一个用户已经有的数据=%s', result))
var result = 删除用户(objectId, sessionToken)
log(util.format('删除用户=%s', result))
// Test section end ==========================================================
// POST: user registration, registration/login with a phone number, third-party registration/login
function 用户注册并上传数据(username, password, postdata) {
var postdata = postdata || ''
var url = 'https://api2.bmob.cn/1/users'
url = encodeURI(url)
var options = {}
options.contentType = "application/json";
options.method = 'POST';
options.headers = {
"X-Bmob-Application-Id": appId,
"X-Bmob-REST-API-Key": restKey,
"Content-Type": "application/json"
}
var data = {
"username": username,
"password": password,
"data": postdata
}
options.body = JSON.stringify(data)
var r = http.request(url, options, null).body.json()
// 202 means the username already exists
if (r && r.createdAt) {
myLog('用户注册并上传数据成功')
myLog(r)
return true
} else {
return false
}
}
// GET: log in
function 用户登录(username, password) {
var url = util.format('https://api2.bmob.cn/1/login?username=%s&password=%s', username, password)
url = encodeURI(url)
var r = http.get(url, {
headers: {
"X-Bmob-Application-Id": appId,
"X-Bmob-REST-API-Key": restKey,
"Content-Type": "application/json"
}
}).body.json()
myLog(r)
if (r && r.createdAt && r.username === username) {
myLog('用户登录成功')
return r
} else {
myLog('用户登录失败')
return false
}
}
// Get the user's objectId + sessionToken
function 获取用户的objectIdAndSessionToken(username, password) {
var result = 用户登录(username, password)
if (result) {
var objectId = result.objectId
var sessionToken = result.sessionToken
var info = {
objectId: objectId,
sessionToken: sessionToken
}
return info
}
}
function 查询用户名是否存在(username) {
var url = util.format('https://api2.bmob.cn/1/users?where={"username":"%s"}', username)
myLog('查询用户名是否存在url', url)
url = encodeURI(url)
var r = http.get(url, {
headers: {
"X-Bmob-Application-Id": appId,
"X-Bmob-REST-API-Key": restKey,
"Content-Type": "application/json"
}
}).body.json()
if (r.results && r.results.length > 0 && r.results[0].username === username) {
myLog('用户名已存在')
return true
} else {
myLog('用户名不存在')
return false
}
}
// GET: fetch the current user / query users
function 获取当前用户信息(objectId) {
var url = 'https://api2.bmob.cn/1/users/' + objectId
myLog('获取当前用户信息url', url)
url = encodeURI(url)
var r = http.get(url, {
headers: {
"X-Bmob-Application-Id": appId,
"X-Bmob-REST-API-Key": restKey,
"Content-Type": "application/json"
}
}).body.json()
// myLog(r)
if (r && r.objectId === objectId) {
myLog('获取当前用户信息成功')
return true
} else {
myLog('获取当前用户信息失败')
return false
}
}
// To modify data a user already has, send an HTTP PUT request to that user's URL. Any key you do not specify stays unchanged, so you can update just part of the user's info. username and password can be changed, but the new username must not already exist.
function 改动一个用户已经有的数据(objectId, sessionToken, 要修改的数据键值对_也就是一个json对象) {
var postdata = postdata || ''
var url = 'https://api2.bmob.cn/1/users/' + objectId
url = encodeURI(url)
var options = {}
options.contentType = "application/json";
options.method = 'PUT';
options.headers = {
"X-Bmob-Application-Id": appId,
"X-Bmob-REST-API-Key": restKey,
"Content-Type": "application/json",
"X-Bmob-Session-Token": sessionToken
}
options.body = JSON.stringify(要修改的数据键值对_也就是一个json对象)
var r = http.request(url, options, null).body.json()
if (r && r.updatedAt) {
myLog('改动一个用户已经有的数据成功')
myLog(r)
return true
} else {
return false
}
}
// Delete a user
function 删除用户(objectId, sessionToken) {
var postdata = postdata || ''
var url = 'https://api2.bmob.cn/1/users/' + objectId
url = encodeURI(url)
var options = {}
options.contentType = "application/json";
options.method = 'DELETE';
options.headers = {
"X-Bmob-Application-Id": appId,
"X-Bmob-REST-API-Key": restKey,
"Content-Type": "application/json",
"X-Bmob-Session-Token": sessionToken
}
var r = http.request(url, options, null).body.json()
if (r && r.msg && r.msg === 'ok') {
myLog('删除用户成功')
myLog(r)
return true
} else {
return false
}
}
|
{
"pile_set_name": "Github"
}
|
# -*- coding: utf-8 -*-
from fontTools.misc.py23 import *
import os
import unittest
from fontTools.ttLib import TTFont
from fontTools.misc.xmlReader import XMLReader, ProgressPrinter, BUFSIZE
import tempfile


class TestXMLReader(unittest.TestCase):

    def test_decode_utf8(self):

        class DebugXMLReader(XMLReader):

            def __init__(self, fileOrPath, ttFont, progress=None):
                super(DebugXMLReader, self).__init__(
                    fileOrPath, ttFont, progress)
                self.contents = []

            def _endElementHandler(self, name):
                if self.stackSize == 3:
                    name, attrs, content = self.root
                    self.contents.append(content)
                super(DebugXMLReader, self)._endElementHandler(name)

        expected = 'fôôbär'
        data = '''\
<?xml version="1.0" encoding="UTF-8"?>
<ttFont>
<name>
<namerecord nameID="1" platformID="3" platEncID="1" langID="0x409">
%s
</namerecord>
</name>
</ttFont>
''' % expected

        with BytesIO(data.encode('utf-8')) as tmp:
            reader = DebugXMLReader(tmp, TTFont())
            reader.read()
        content = strjoin(reader.contents[0]).strip()
        self.assertEqual(expected, content)

    def test_normalise_newlines(self):

        class DebugXMLReader(XMLReader):

            def __init__(self, fileOrPath, ttFont, progress=None):
                super(DebugXMLReader, self).__init__(
                    fileOrPath, ttFont, progress)
                self.newlines = []

            def _characterDataHandler(self, data):
                self.newlines.extend([c for c in data if c in ('\r', '\n')])

        # notice how when CR is escaped, it is not normalised by the XML parser
        data = (
            '<ttFont>\r'                                  #       \r -> \n
            ' <test>\r\n'                                 #     \r\n -> \n
            ' a line of text\n'                           #       \n
            ' escaped CR and unix newline &#13;\n'        #  &#13;\n -> \r\n
            ' escaped CR and macintosh newline &#13;\r'   #  &#13;\r -> \r\n
            ' escaped CR and windows newline &#13;\r\n'   #  &#13;\r\n -> \r\n
            ' </test>\n'                                  #       \n
            '</ttFont>')

        with BytesIO(data.encode('utf-8')) as tmp:
            reader = DebugXMLReader(tmp, TTFont())
            reader.read()
        expected = ['\n'] * 3 + ['\r', '\n'] * 3 + ['\n']
        self.assertEqual(expected, reader.newlines)

    def test_progress(self):

        class DummyProgressPrinter(ProgressPrinter):

            def __init__(self, title, maxval=100):
                self.label = title
                self.maxval = maxval
                self.pos = 0

            def set(self, val, maxval=None):
                if maxval is not None:
                    self.maxval = maxval
                self.pos = val

            def increment(self, val=1):
                self.pos += val

            def setLabel(self, text):
                self.label = text

        data = (
            '<ttFont>\n'
            ' <test>\n'
            ' %s\n'
            ' </test>\n'
            '</ttFont>\n'
            % ("z" * 2 * BUFSIZE)
        ).encode('utf-8')

        dataSize = len(data)
        progressBar = DummyProgressPrinter('test')
        with BytesIO(data) as tmp:
            reader = XMLReader(tmp, TTFont(), progress=progressBar)
            self.assertEqual(progressBar.pos, 0)
            reader.read()
        self.assertEqual(progressBar.pos, dataSize // 100)
        self.assertEqual(progressBar.maxval, dataSize // 100)
        self.assertTrue('test' in progressBar.label)

        with BytesIO(b"<ttFont></ttFont>") as tmp:
            reader = XMLReader(tmp, TTFont(), progress=progressBar)
            reader.read()
        # when data size is less than 100 bytes, 'maxval' is 1
        self.assertEqual(progressBar.maxval, 1)

    def test_close_file_path(self):
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp.write(b'<ttFont></ttFont>')
        reader = XMLReader(tmp.name, TTFont())
        reader.read()
        # when reading from path, the file is closed automatically at the end
        self.assertTrue(reader.file.closed)
        # this does nothing
        reader.close()
        self.assertTrue(reader.file.closed)
        os.remove(tmp.name)

    def test_close_file_obj(self):
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp.write(b'<ttFont>"hello"</ttFont>')
        with open(tmp.name, "rb") as f:
            reader = XMLReader(f, TTFont())
            reader.read()
            # when reading from a file or file-like object, the latter is kept open
            self.assertFalse(reader.file.closed)
        # ... until the user explicitly closes it
        reader.close()
        self.assertTrue(reader.file.closed)
        os.remove(tmp.name)

    def test_read_sub_file(self):
        # Verifies that sub-file content is able to be read to a table.
        expectedContent = 'testContent'
        expectedNameID = '1'
        expectedPlatform = '3'
        expectedLangId = '0x409'
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            subFileData = (
                '<ttFont ttLibVersion="3.15">'
                '<name>'
                '<namerecord nameID="%s" platformID="%s" platEncID="1" langID="%s">'
                '%s'
                '</namerecord>'
                '</name>'
                '</ttFont>'
            ) % (expectedNameID, expectedPlatform, expectedLangId, expectedContent)
            tmp.write(subFileData.encode("utf-8"))

        with tempfile.NamedTemporaryFile(delete=False) as tmp2:
            fileData = (
                '<ttFont ttLibVersion="3.15">'
                '<name>'
                '<namerecord src="%s"/>'
                '</name>'
                '</ttFont>'
            ) % tmp.name
            tmp2.write(fileData.encode('utf-8'))

        ttf = TTFont()
        with open(tmp2.name, "rb") as f:
            reader = XMLReader(f, ttf)
            reader.read()
            reader.close()
            nameTable = ttf['name']
            self.assertTrue(int(expectedNameID) == nameTable.names[0].nameID)
            self.assertTrue(int(expectedLangId, 16) == nameTable.names[0].langID)
            self.assertTrue(int(expectedPlatform) == nameTable.names[0].platformID)
            self.assertEqual(expectedContent, nameTable.names[0].string.decode(nameTable.names[0].getEncoding()))

        os.remove(tmp.name)
        os.remove(tmp2.name)


if __name__ == '__main__':
    import sys
    sys.exit(unittest.main())
|
{
"pile_set_name": "Github"
}
|
#ifndef NODE_STATE_H
#define NODE_STATE_H
#include "DataflowCFG.h"
class NodeFact;
class NodeState;
#include "lattice.h"
#include "analysis.h"
#include <map>
#include <vector>
#include <string>
#include <set>
#ifdef THREADED
#include "tbb/concurrent_hash_map.h"
#include "tbb/atomic.h"
#endif
//template<class factType>
/************************************************
*** NodeFact ***
*** A fact associated with a CFG node by ***
*** some analysis that is not evolved as part ***
*** of a dataflow analysis (i.e. it should ***
*** stay constant throughout the analysis). ***
************************************************/
// A fact associated with a CFG node that is not part of a dataflow analysis. In other words,
// it is not a lattice and is not meant to evolve during the course of a dataflow analysis.
class NodeFact: public printable
{
public:
// The string that represents this object.
// Every line of this string must be prefixed by indent.
// The last character of the returned string must not be '\n', even if it is a multi-line string.
//virtual string str(string indent="")=0;
// returns a copy of this node fact
virtual NodeFact* copy() const=0;
/* void* fact;
public:
NodeFact(void* fact)
{
this->fact = fact;
}
NodeFact(factType* fact)
{
this->fact = *fact;
}
void* getFact()
{
return fact;
}*/
};
/**********************************************
*** NodeState ***
*** The state of all the Lattice elements ***
*** associated by dataflow analyses with a ***
*** given node. This state will evolve as ***
*** a result of the dataflow analysis. ***
**********************************************/
#ifdef THREADED
class NodeStateHashCompare
{
public:
NodeStateHashCompare() {}
NodeStateHashCompare(const NodeStateHashCompare & that) {}
~NodeStateHashCompare(){}
static bool equal(const Analysis* & j, const Analysis* & k )
{ return j==k; }
static bool equal(const Analysis* const & j, const Analysis* const & k )
{ return j==k; }
static size_t hash( const Analysis* k ) { return (size_t) k; }
};
#endif
class NodeState
{
#ifdef THREADED
typedef tbb::concurrent_hash_map <Analysis*, std::vector<Lattice*>, NodeStateHashCompare > LatticeMap;
//typedef tbb::concurrent_hash_map <Analysis*, map <int, NodeFact*>, NodeStateHashCompare > NodeFactMap;
typedef tbb::concurrent_hash_map <Analysis*, std::vector<NodeFact*>, NodeStateHashCompare > NodeFactMap;
typedef tbb::concurrent_hash_map <Analysis*, bool, NodeStateHashCompare > BoolMap;
#else
typedef std::map<Analysis*, std::vector<Lattice*> > LatticeMap;
//typedef std::map<Analysis*, std::map<int, NodeFact*> > NodeFactMap;
typedef std::map<Analysis*, std::vector<NodeFact*> > NodeFactMap;
typedef std::map<Analysis*, bool > BoolMap;
#endif
// the dataflow information Above the node, for each analysis that
// may be interested in the current node
LatticeMap dfInfoAbove;
// the Analysis information Below the node, for each analysis that
// may be interested in the current node
LatticeMap dfInfoBelow;
// the facts that are true at this node, for each analysis that
// may be interested in the current node
NodeFactMap facts;
// Contains all the Analyses that have initialized their state at this node. It is a map because
// TBB doesn't provide a concurrent set.
BoolMap initializedAnalyses;
// the dataflow node that this NodeState object corresponds to
//DataflowNode parentNode;
public:
/*NodeState(DataflowNode& parentNode) : parentNode(parentNode)
{}
NodeState(CFGNode& parentNode) : parentNode(parentNode)
{}
NodeState(CFGNode parentNode) : parentNode(parentNode)
{}*/
NodeState()
{}
/* void initialize(Analysis* analysis, int latticeName)
{
initDfMap(dfInfoAbove);
initDfMap(dfInfoBelow);
}
private:
// initializes the given lattice owned by the given analysis in the given map
// dfMap may be either dfInfoAbove or dfInfoBelow
void initDfMap(std::map<Analysis*, std::vector<Lattice*> >& dfMap)
{
std::map<Analysis*, std::vector<Lattice*> >::iterator dfLattices;
// if this analysis has registered some Lattices at this node
if((dfLattices = dfMap.find(analysis)) != dfInfoAbove.end())
{
std::map<int, Lattice>::iterator it;
// if the given lattice name was registered by this analysis
if((it = (*dfLattices).find(latticeName) != (*dfLattices).end())
{
(*it)->initialize();
}
else
{
(*dfLattices)[latticeName] = new Lattice();
}
}
else
{
std::map<int, Lattice> newMap;
Lattice newLattice;
newMap[latticeName] = newLattice;
dfMap[analysis] = newMap;
}
}*/
public:
// Records that this analysis has initialized its state at this node
void initialized(Analysis* analysis);
// Returns true if this analysis has initialized its state at this node and false otherwise
bool isInitialized(Analysis* analysis);
// adds the given lattice, organizing it under the given analysis and lattice name
//void addLattice(const Analysis* analysis, int latticeName, Lattice* l);
// Set this node's lattices for this analysis (possibly above or below only, replacing previous mappings)
// These methods take ownership of the pointed-to lattices.
void setLattices(const Analysis* analysis, std::vector<Lattice*>& lattices);
void setLatticeAbove(const Analysis* analysis, std::vector<Lattice*>& lattices);
void setLatticeBelow(const Analysis* analysis, std::vector<Lattice*>& lattices);
// returns the given lattice from above the node that is owned by the given analysis
Lattice* getLatticeAbove(const Analysis* analysis, int latticeName) const;
// returns the given lattice from below the node that is owned by the given analysis
Lattice* getLatticeBelow(const Analysis* analysis, int latticeName) const;
//! returns all the lattices from above the CFG node (corresponding to SgNode and a CFG index) that are owned by the given analysis
// (read-only access)
static const std::vector<Lattice*>& getLatticeAbove(const Analysis* analysis, SgNode* n, unsigned int index ) ;
// returns all the lattices from below the CFG node (corresponding to SgNode and a CFG index) that are owned by the given analysis
// (read-only access)
static const std::vector<Lattice*>& getLatticeBelow(const Analysis* analysis, SgNode* n, unsigned int index) ;
// returns the map containing all the lattices from above the node that are owned by the given analysis
// (read-only access)
const std::vector<Lattice*>& getLatticeAbove(const Analysis* analysis) const;
// returns the map containing all the lattices from below the node that are owned by the given analysis
// (read-only access)
const std::vector<Lattice*>& getLatticeBelow(const Analysis* analysis) const;
// returns the map containing all the lattices from above the node that are owned by the given analysis
// (read/write access)
std::vector<Lattice*>& getLatticeAboveMod(const Analysis* analysis);
// returns the map containing all the lattices from below the node that are owned by the given analysis
// (read/write access)
std::vector<Lattice*>& getLatticeBelowMod(const Analysis* analysis);
// deletes all lattices above this node associated with the given analysis
void deleteLatticeAbove(const Analysis* analysis);
// deletes all lattices below this node associated with the given analysis
void deleteLatticeBelow(const Analysis* analysis);
// returns true if the two lattices vectors are the same and false otherwise
static bool eqLattices(const std::vector<Lattice*>& latticesA,
const std::vector<Lattice*>& latticesB);
// Creates a copy of all the dataflow state (Lattices and Facts) associated with
// analysis srcA and associates this copied state with analysis tgtA.
void cloneAnalysisState(const Analysis* srcA, const Analysis* tgtA);
// Given a set of analyses, one of which is designated as a master, unions together the
// lattices associated with each of these analyses. The results are associated on each
// CFG node with the master analysis.
void unionLattices(std::set<Analysis*>& unionSet, const Analysis* master);
//void removeLattice(const Analysis* analysis, int latticeName);
private:
/*// adds the given lattice to the given dfInfo structure (dfInfoAbove or dfInfoBelow),
// organizing it under the given analysis and lattice name
void addLattice_ex(std::map<Analysis*, std::vector<Lattice*> >& dfMap,
const Analysis* analysis, int latticeName, Lattice* l);
*/
// returns the given lattice, which owned by the given analysis
Lattice* getLattice_ex(const LatticeMap& dfMap,
const Analysis* analysis, int latticeName) const;
/*// removes the given lattice, owned by the given analysis
// returns true if the given lattice was found and removed and false if it was not found
bool removeLattice_ex(LatticeMap& dfMap,
const Analysis* analysis, int latticeName);
*/
public:
// associates the given analysis/fact name with the given NodeFact,
// deleting any previous association (the previous NodeFact is freed)
void addFact(const Analysis* analysis, int factName, NodeFact* f);
// associates the given analysis with the given map of fact names to NodeFacts,
// deleting any previous association (the previous NodeFacts are freed). This call
// takes the actual provided facts and does not make a copy of them.
//void setFacts(const Analysis* analysis, const std::map<int, NodeFact*>& newFacts);
void setFacts(const Analysis* analysis, const std::vector<NodeFact*>& newFacts);
// returns the given fact, which owned by the given analysis
NodeFact* getFact(const Analysis* analysis, int factName) const ;
// returns the map of all the facts owned by the given analysis at this NodeState
// (read-only access)
//const std::map<int, NodeFact*>& getFacts(const Analysis* analysis) const;
const std::vector<NodeFact*>& getFacts(const Analysis* analysis) const;
// returns the map of all the facts owned by the given analysis at this NodeState
// (read/write access)
//std::map<int, NodeFact*>& getFactsMod(const Analysis* analysis);
std::vector<NodeFact*>& getFactsMod(const Analysis* analysis);
// removes the given fact, owned by the given analysis
// returns true if the given fact was found and removed and false if it was not found
//bool removeFact(const Analysis* analysis, int factName);
// deletes all facts at this node associated with the given analysis
void deleteFacts(const Analysis* analysis);
// delete all state at this node associated with the given analysis
void deleteState(const Analysis* analysis);
// ====== STATIC ======
private:
static std::map<DataflowNode, std::vector<NodeState*> > nodeStateMap;
static bool nodeStateMapInit;
public:
// returns the NodeState object associated with the given dataflow node.
// index is used when multiple NodeState objects are associated with a given node
// (ex: SgFunctionCallExp has 3 NodeStates: entry, function body, exit)
static NodeState* getNodeState(const DataflowNode& n, int index=0);
//returns the NodeState object associated with a given SgNode
//index is used when multiple Control flow nodes (and consequently multiple NodeStates) are associated with a given node
static NodeState* getNodeState(SgNode * n, int index=0);
// returns a vector of NodeState objects associated with the given dataflow node.
static const std::vector<NodeState*> getNodeStates(const DataflowNode& n);
// returns the number of NodeStates associated with the given DataflowNode
static int numNodeStates(DataflowNode& n);
private:
// initializes the nodeStateMap
static void initNodeStateMap(bool (*filter) (CFGNode cfgn));
public:
/*// copies the facts from that to this
void copyFacts(NodeState &that);
// copies the dfInfoBelow lattices from that to this
void copyLatticesBelow(NodeState &that);
// copies the dfInfoAbove lattices from the given map to this
void copyLatticesAbove(const LatticeMap& thatInfo);
// copies the dfInfoBelow lattices from the given map to this
void copyLatticesBelow(const LatticeMap& thatInfo);
protected:
// copies the dfInfoAbove or dfInfoBelow lattices from that to this
void copyLattices(const LatticeMap& dfInfo,
const LatticeMap& thatInfo);
*/
// copies from's above lattices for the given analysis to to's above lattices for the same analysis
static void copyLattices_aEQa(Analysis* analysis, NodeState& to, const NodeState& from);
// copies from's above lattices for analysisA to to's above lattices for analysisB
static void copyLattices_aEQa(Analysis* analysisA, NodeState& to, Analysis* analysisB, const NodeState& from);
// copies from's above lattices for the given analysis to to's below lattices for the same analysis
static void copyLattices_bEQa(Analysis* analysis, NodeState& to, const NodeState& from);
// copies from's above lattices for analysisA to to's below lattices for analysisB
static void copyLattices_bEQa(Analysis* analysisA, NodeState& to, Analysis* analysisB, const NodeState& from);
// copies from's below lattices for the given analysis to to's below lattices for the same analysis
static void copyLattices_bEQb(Analysis* analysis, NodeState& to, const NodeState& from);
// copies from's below lattices for the given analysis to to's above lattices for the same analysis
static void copyLattices_aEQb(Analysis* analysis, NodeState& to, const NodeState& from);
protected:
// makes dfInfoX a copy of dfInfoY
static void copyLattices(std::vector<Lattice*>& dfInfoX, const std::vector<Lattice*>& dfInfoY);
/*public:
void operator=(NodeState& that);*/
public:
std::string str(Analysis* analysis, std::string indent="") const;
};
#endif
|
{
"pile_set_name": "Github"
}
|
GL_NV_transform_feedback
http://developer.download.nvidia.com/opengl/specs/GL_NV_transform_feedback.txt
GL_NV_transform_feedback
GL_BACK_PRIMARY_COLOR_NV 0x8C77
GL_BACK_SECONDARY_COLOR_NV 0x8C78
GL_TEXTURE_COORD_NV 0x8C79
GL_CLIP_DISTANCE_NV 0x8C7A
GL_VERTEX_ID_NV 0x8C7B
GL_PRIMITIVE_ID_NV 0x8C7C
GL_GENERIC_ATTRIB_NV 0x8C7D
GL_TRANSFORM_FEEDBACK_ATTRIBS_NV 0x8C7E
GL_TRANSFORM_FEEDBACK_BUFFER_MODE_NV 0x8C7F
GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS_NV 0x8C80
GL_ACTIVE_VARYINGS_NV 0x8C81
GL_ACTIVE_VARYING_MAX_LENGTH_NV 0x8C82
GL_TRANSFORM_FEEDBACK_VARYINGS_NV 0x8C83
GL_TRANSFORM_FEEDBACK_BUFFER_START_NV 0x8C84
GL_TRANSFORM_FEEDBACK_BUFFER_SIZE_NV 0x8C85
GL_TRANSFORM_FEEDBACK_RECORD_NV 0x8C86
GL_PRIMITIVES_GENERATED_NV 0x8C87
GL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN_NV 0x8C88
GL_RASTERIZER_DISCARD_NV 0x8C89
GL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS_NV 0x8C8A
GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS_NV 0x8C8B
GL_INTERLEAVED_ATTRIBS_NV 0x8C8C
GL_SEPARATE_ATTRIBS_NV 0x8C8D
GL_TRANSFORM_FEEDBACK_BUFFER_NV 0x8C8E
GL_TRANSFORM_FEEDBACK_BUFFER_BINDING_NV 0x8C8F
void glBeginTransformFeedbackNV (GLenum primitiveMode)
void glEndTransformFeedbackNV (void)
void glTransformFeedbackAttribsNV (GLuint count, const GLint *attribs, GLenum bufferMode)
void glBindBufferRangeNV (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size)
void glBindBufferOffsetNV (GLenum target, GLuint index, GLuint buffer, GLintptr offset)
void glBindBufferBaseNV (GLenum target, GLuint index, GLuint buffer)
void glTransformFeedbackVaryingsNV (GLuint program, GLsizei count, const GLint *locations, GLenum bufferMode)
void glActiveVaryingNV (GLuint program, const GLchar *name)
GLint glGetVaryingLocationNV (GLuint program, const GLchar *name)
void glGetActiveVaryingNV (GLuint program, GLuint index, GLsizei bufSize, GLsizei *length, GLsizei *size, GLenum *type, GLchar *name)
void glGetTransformFeedbackVaryingNV (GLuint program, GLuint index, GLint *location)
|
{
"pile_set_name": "Github"
}
|
#region Copyright (C) 2005-2011 Team MediaPortal
// Copyright (C) 2005-2011 Team MediaPortal
// http://www.team-mediaportal.com
//
// MediaPortal is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 2 of the License, or
// (at your option) any later version.
//
// MediaPortal is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with MediaPortal. If not, see <http://www.gnu.org/licenses/>.
#endregion
using System;
using System.Diagnostics;
using System.IO;
using System.Threading;
using MediaPortal.GUI.Library;
internal class SlideCache
{
private enum RelativeIndex
{
Prev = 0,
Curr = 1,
Next = 2
}
private Thread _prefetchingThread;
private Object _prefetchingThreadLock = new Object();
private SlidePicture[] _slides = new SlidePicture[3];
private Object _slidesLock = new Object();
private string _neededSlideFilePath;
private RelativeIndex _neededSlideRelativeIndex;
private SlidePicture NeededSlide
{
get { return _slides[(int)_neededSlideRelativeIndex]; }
set { _slides[(int)_neededSlideRelativeIndex] = value; }
}
private SlidePicture PrevSlide
{
get { return _slides[(int)RelativeIndex.Prev]; }
set { _slides[(int)RelativeIndex.Prev] = value; }
}
private SlidePicture CurrentSlide
{
get { return _slides[(int)RelativeIndex.Curr]; }
set { _slides[(int)RelativeIndex.Curr] = value; }
}
private SlidePicture NextSlide
{
get { return _slides[(int)RelativeIndex.Next]; }
set { _slides[(int)RelativeIndex.Next] = value; }
}
public SlidePicture GetCurrentSlide(string slideFilePath)
{
// wait for any (needed) prefetching to complete
lock (_prefetchingThreadLock)
{
bool itemFiles = File.Exists(slideFilePath);
if (!itemFiles)
{
CurrentSlide = new SlidePicture(slideFilePath, false);
return CurrentSlide;
}
if (_prefetchingThread != null)
{
// only wait for the prefetching if it is for the slide file that we need
if (_neededSlideFilePath == slideFilePath)
{
_prefetchingThread.Priority = ThreadPriority.AboveNormal;
}
}
}
while (_prefetchingThread != null)
{
GUIWindowManager.Process();
}
lock (_slidesLock)
{
// try and use pre-fetched slide if appropriate
if (NextSlide != null && NextSlide.FilePath == slideFilePath)
{
return NextSlide;
}
if (PrevSlide != null && PrevSlide.FilePath == slideFilePath)
{
return PrevSlide;
}
if (CurrentSlide != null && CurrentSlide.FilePath == slideFilePath)
{
return CurrentSlide;
}
// slide is not in cache, so get it now
CurrentSlide = new SlidePicture(slideFilePath, false);
return CurrentSlide;
}
}
public void PrefetchNextSlide(string prevPath, string currPath, string nextPath)
{
lock (_prefetchingThreadLock)
{
// assume that any incomplete prefetching is unneeded, abort
if (_prefetchingThread != null)
{
_prefetchingThread.Abort();
_prefetchingThread = null;
}
}
lock (_slidesLock)
{
// shift slides and determine _neededSlideRelativeIndex
if (NextSlide != null && NextSlide.FilePath == currPath)
{
PrevSlide = CurrentSlide;
CurrentSlide = NextSlide;
_neededSlideFilePath = nextPath;
_neededSlideRelativeIndex = RelativeIndex.Next;
}
else if (PrevSlide != null && PrevSlide.FilePath == currPath)
{
NextSlide = CurrentSlide;
CurrentSlide = PrevSlide;
_neededSlideFilePath = prevPath;
_neededSlideRelativeIndex = RelativeIndex.Prev;
}
else
{
// may need all 3, but just get next
_neededSlideFilePath = nextPath;
_neededSlideRelativeIndex = RelativeIndex.Next;
}
}
lock (_prefetchingThreadLock)
{
_prefetchingThread = new Thread(LoadNextSlideThread);
_prefetchingThread.IsBackground = true;
_prefetchingThread.Name = "PicPrefetch";
//string cacheString = String.Format("cache:{0}|{1}|{2} ",
// _slides[0] != null ? "1" : "0",
// _slides[1] != null ? "1" : "0",
// _slides[2] != null ? "1" : "0");
//Trace.WriteLine(cacheString + String.Format("prefetching {0} slide {1}", _neededSlideRelativeIndex.ToString("G"), System.IO.Path.GetFileNameWithoutExtension(_neededSlideFilePath)));
_prefetchingThread.Start();
}
}
/// <summary>
/// Method to do the work of actually loading the image from file. This method
/// should only be used by the prefetching thread.
/// </summary>
public void LoadNextSlideThread()
{
try
{
Debug.Assert(Thread.CurrentThread == _prefetchingThread);
lock (_slidesLock)
{
NeededSlide = new SlidePicture(_neededSlideFilePath, false);
}
lock (_prefetchingThreadLock)
{
_prefetchingThread = null;
}
}
catch (ThreadAbortException ex)
{
Log.Debug("SlideCache: LoadNextSlideThread {0}", ex.Message);
// abort is expected when slide changes outpace prefetch, ignore
// Trace.WriteLine(String.Format(" ...aborted {0} slide {1}", _neededSlideRelativeIndex.ToString("G"), System.IO.Path.GetFileNameWithoutExtension(_neededSlideFilePath)));
}
catch (Exception ex)
{
Log.Error("SlideCache: LoadNextSlideThread {0}", ex.Message);
}
}
public void InvalidateSlide(string slideFilePath)
{
lock (_slidesLock)
{
for (int i = 0; i < _slides.Length; i++)
{
SlidePicture slide = _slides[i];
if (slide != null && slide.FilePath == slideFilePath)
{
_slides[i] = null;
}
}
}
// Note that we could pre-fetch the invalidated slide, but if the new version
// of the slide is going to be requested immediately (as with DoRotate) then
// pre-fetching won't help.
}
}
|
{
"pile_set_name": "Github"
}
|
---
Description: The previous topic (How the Resource Management System matches and chooses resources) looks at qualifier-matching in general. This topic focuses on language-tag-matching in more detail.
title: How the Resource Management System matches language tags
template: detail.hbs
ms.date: 11/02/2017
ms.topic: article
keywords: windows 10, uwp, resource, image, asset, MRT, qualifier
ms.localizationpriority: medium
---
# How the Resource Management System matches language tags
The previous topic ([How the Resource Management System matches and chooses resources](how-rms-matches-and-chooses-resources.md)) looks at qualifier-matching in general. This topic focuses on language-tag-matching in more detail.
## Introduction
Resources with language tag qualifiers are compared and scored based on the app runtime language list. For definitions of the different language lists, see [Understand user profile languages and app manifest languages](../design/globalizing/manage-language-and-region.md). Matching for the first language in a list occurs before matching of the second language in a list, even for other regional variants. For example, a resource for en-GB is chosen over an fr-CA resource if the app runtime language is en-US. Only if there are no resources for a form of en is a resource for fr-CA chosen (note that the app's default language could not be set to any form of en in that case).
The scoring mechanism uses data that is included in the [BCP-47](https://tools.ietf.org/html/bcp47) subtag registry, and other data sources. It allows for a scoring gradient with different qualities of match and, when multiple candidates are available, it selects the candidate with the best-matching score.
So, you can tag language content in generic terms, but you can still specify specific content when needed. For example, your app might have many English strings that are common to both the United States, Britain, and other regions. Tagging these strings as "en" (English) saves space and localization overhead. When distinctions need to be made, such as in a string containing the word "color/colour", the United States and British versions can be tagged separately using both language and region subtags, as "en-US" and "en-GB", respectively.
## Language tags
Languages are identified using normalized, well-formed BCP-47 language tags. Subtag components are defined in the BCP-47 subtag registry. The normal structure for a BCP-47 language tag consists of one or more of the following subtag elements.
- Language subtag (required).
- Script subtag (which may be inferred using the default specified in the subtag registry).
- Region subtag (optional).
- Variant subtag (optional).
Additional subtag elements may be present, but they will have a negligible effect on language matching. There are no language ranges defined using the wild card ("*"), for example, "en-*".
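To make the subtag structure concrete, here is a minimal C# sketch that splits a well-formed tag such as `en-Latn-AU` into the language, script, and region subtags listed above. It is only an illustration under simplifying assumptions (the tag is assumed well formed, and variant, extension, and private-use subtags are ignored); it is not the parser that Windows or the BCP-47 specification defines, and the type and method names are hypothetical.

```csharp
using System;
using System.Linq;

static class Bcp47Tag
{
    // Splits a well-formed BCP-47 tag into its language, script, and region subtags.
    // Assumptions: no grandfathered tags, no extension or private-use subtags.
    public static (string Language, string Script, string Region) Split(string tag)
    {
        string[] parts = tag.Split('-');
        string language = parts[0].ToLowerInvariant();             // required, e.g. "en"
        string script = parts.Skip(1).FirstOrDefault(
            p => p.Length == 4 && p.All(char.IsLetter));           // e.g. "Latn"
        string region = parts.Skip(1).FirstOrDefault(
            p => (p.Length == 2 && p.All(char.IsLetter)) ||
                 (p.Length == 3 && p.All(char.IsDigit)));          // e.g. "AU" or "053"
        return (language, script, region);
    }
}
```

For example, `Bcp47Tag.Split("en-Latn-AU")` yields `("en", "Latn", "AU")`, and `Bcp47Tag.Split("es-419")` yields `("es", null, "419")`.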
## Matching two languages
Whenever Windows compares two languages it is typically done within the context of a larger process. It may be in the context of assessing multiple languages, such as when Windows generates the application language list (see [Understand user profile languages and app manifest languages](../design/globalizing/manage-language-and-region.md)). Windows does this by matching multiple languages from the user preferences to the languages specified in the app's manifest. The comparison might also be in the context of assessing language along with other qualifiers for a particular resource. One example is when Windows resolves a particular file resource to a particular resource context; with the user's home location or the device's current scale or dpi as other factors (besides language) that are factored into the resource selection.
When two language tags are compared, the comparison is assigned a score based on the nearness of the match.
| Match | Score | Example |
| ----- | ----- | ------- |
| Exact match | Highest | en-AU : en-AU |
| Variant match (language, script, region, variant) | | en-AU-variant1 : en-AU-variant1-t-ja |
| Region match (language, script, region) | | en-AU : en-AU-variant1 |
| Partial match (language, script) | | |
| - Macro region match | | en-AU : en-053 |
| - Region-neutral match | | en-AU : en |
| - Orthographic affinity match (limited support) | | en-AU : en-GB |
| - Preferred region match | | en-AU : en-US |
| - Any region match | | en-AU : en-CA |
| Undetermined language (any language match) | | en-AU : und |
| No match (script mismatch or primary language tag mismatch) | Lowest | en-AU : fr-FR |
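One way to picture this table is as an ordered scale from which the best-scoring candidate resource is chosen. The sketch below is a hypothetical model rather than the actual Windows implementation: the enum simply mirrors the rows of the table, and the `score` delegate stands in for the detailed comparison rules described in the rest of this topic.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Higher values indicate a closer match, mirroring the table above.
enum MatchQuality
{
    NoMatch = 0,
    UndeterminedLanguage,
    AnyRegion,
    PreferredRegion,
    OrthographicAffinity,
    RegionNeutral,
    MacroRegion,
    Region,
    Variant,
    Exact
}

static class ResourceSelector
{
    // Returns the candidate tag whose match against the runtime language scores highest.
    public static string ChooseBest(
        string runtimeLanguage,
        IEnumerable<string> candidateTags,
        Func<string, string, MatchQuality> score)
    {
        return candidateTags.OrderByDescending(tag => score(runtimeLanguage, tag)).First();
    }
}
```

A real implementation also weights the result by the position of the matching language in the app language list, as described later in this topic.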
### Exact match
The tags are exactly equal (all subtag elements match). A comparison may be promoted to this match type from a variant or region match. For example, en-US matches en-US.
### Variant match
The tags match on the language, script, region, and variant subtags, but they differ in some other respect.
### Region match
The tags match on the language, script, and region subtags, but they differ in some other respect. For example, de-DE-1996 matches de-DE, and en-US-x-Pirate matches en-US.
### Partial matches
The tags match on the language and script subtags, but they differ in the region or some other subtag. For example, en-US matches en, or en-US matches en-\*.
#### Macro region match
The tags match on language and script subtags; both tags have region subtags, one of which denotes a macro region that encompasses the other region. The macro region subtags are always numeric and are derived from the United Nations Statistics Division M.49 country and area codes. For details on encompassing relationships, see [Composition of macro geographic (continental) regions, geographical sub-regions, and selected economic and other groupings](https://unstats.un.org/unsd/methods/m49/m49regin.htm).
**Note** UN codes for "economic groupings" or "other groupings" are not supported in BCP-47.
**Note** A tag with the macro-region subtag "001" is considered equivalent to a region-neutral tag. For example, "es-001" and "es" are treated as synonymous.
#### Region-neutral match
The tags match on language and script subtags, and just one tag has a region tag. A parent match is preferred over other partial matches.
#### Orthographic affinity match
The tags match on language and script subtags, and the region subtags have orthographic affinity. Affinity relies on data maintained in Windows that defines language-specific affined regions, for example, "en-IE" and "en-GB".
#### Preferred region match
The tags match on language and script subtags, and one of the region subtags is the default region subtag for the language. For example, "fr-FR" is the default region for the "fr" subtag. So, fr-FR is a better match for fr-BE than is fr-CA. This relies on data maintained in Windows defining a default region for each language in which Windows is localized.
#### Sibling match
The tags match on language and script subtags, and both have region subtags, but no other relationship is defined between them. In the event of multiple sibling matches, the last enumerated sibling will be the winner, in the absence of a higher match.
### Undetermined language
A resource may be tagged as "und" to indicate that it matches any language. This tag may also be used with a script tag to filter matches based on script. For example, "und-Latn" will match any language tag that uses Latin script. See below for more details.
### Script mismatch
When the tags match only on the primary language tag but not the script, the pair is considered not to match and is scored below the level of a valid match.
### No match
Mismatching primary language subtags are scored below the level of a valid match. For example, zh-Hant does not match zh-Hans.
## Examples
A user language "zh-Hans-CN" (Chinese Simplified (China)) matches the following resources in the priority order shown. An X indicates no match.

1. Exact match; 2. & 3. Region match; 4. Parent match; 5. Sibling match.
When a language subtag has a Suppress-Script value defined in the BCP-47 subtag registry, corresponding matching occurs, taking on the value of the suppressed script code. For example, en-Latn-US matches en-US. In this next example the user language is "en-AU" (English (Australia)).

1. Exact match; 2. Macro region match; 3. Region-neutral match; 4. Orthographic affinity match; 5. Preferred region match; 6. Sibling match.
## Matching a language to a language list
At times, matching occurs as part of a bigger process of matching a single language to a list of languages. For example, there may be a match of a single language-based resource to an app's language list. The score of the match is weighted by the position of the first matching language in the list. The lower the language is in the list, the lower the score will be.
When the language list contains two or more regional variants having the same language and script subtags, comparisons for the first language tag are only scored for exact, variant, and region matches. Scoring partial matches is postponed to the last regional variant. This enables users to finely control the matching behavior for their language list. The matching behavior may include allowing an exact match for a secondary item in the list to be preferred over a partial match for the first item in the list, if there is a third item that matches the language and script of the first. Here's an example.
- Language list (in order): "pt-PT" (Portuguese (Portugal)), "en-US" (English (United States)), "pt-BR" (Portuguese (Brazil)).
- Resources: "en-US", "pt-BR".
- Resource with the higher score: "en-US".
- Description: The comparison starts with "pt-PT" but does not find an exact match. Due to the presence of "pt-BR" in the user's language list, partial matching is postponed to the comparison with "pt-BR". The next language comparison is "en-US", which has an exact match. So, the winning resource is "en-US".
OR
- Language list (in order): "es-MX" (Spanish (Mexico)), "es-HO" (Spanish (Honduras)).
- Resources: "es-ES", "es-HO".
- Resource with the higher score: "es-HO".
- Description: The comparison starts with "es-MX" but, because "es-HO" is also in the language list, partial matches against "es-MX" are postponed. The comparison then moves to "es-HO", which finds an exact match, so the winning resource is "es-HO".
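To make the selection behavior in these examples concrete, here is a minimal C++ sketch. It is only illustrative: the tag parsing, the match levels, and the position weighting are simplified assumptions, not the actual Windows scoring.

```cpp
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Simplified tag: language before the first '-', region after it.
// (Real BCP-47 parsing also handles script, variants, and extensions.)
struct Tag {
    std::string language, region;
    explicit Tag(const std::string& s) {
        auto dash = s.find('-');
        language = s.substr(0, dash);
        if (dash != std::string::npos) region = s.substr(dash + 1);
    }
};

enum MatchLevel { None = 0, Partial = 1, Exact = 2 };

MatchLevel Match(const Tag& resource, const Tag& wanted) {
    if (resource.language != wanted.language) return None;
    return resource.region == wanted.region ? Exact : Partial;
}

// Pick the best resource for an ordered language list. Partial matches for
// a list entry are postponed when a later entry shares the same language.
std::string PickResource(const std::vector<std::string>& languages,
                         const std::vector<std::string>& resources) {
    std::string best;
    double bestScore = 0.0;
    for (size_t i = 0; i < languages.size(); ++i) {
        Tag wanted(languages[i]);
        bool laterVariant = std::any_of(
            languages.begin() + i + 1, languages.end(),
            [&](const std::string& l) { return Tag(l).language == wanted.language; });
        double weight = 1.0 / double(i + 1);  // earlier entries count for more
        for (const auto& r : resources) {
            MatchLevel level = Match(Tag(r), wanted);
            if (level == None) continue;
            if (level == Partial && laterVariant) continue;  // postponed
            double score = weight * level;
            if (score > bestScore) { bestScore = score; best = r; }
        }
    }
    return best;
}

int main() {
    // Prints "en-US": partial scoring for "pt-PT" is postponed to "pt-BR".
    std::cout << PickResource({"pt-PT", "en-US", "pt-BR"}, {"en-US", "pt-BR"}) << "\n";
    // Prints "es-HO": partial scoring for "es-MX" is postponed to "es-HO".
    std::cout << PickResource({"es-MX", "es-HO"}, {"es-ES", "es-HO"}) << "\n";
}
```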
## Undetermined language ("und")
The language tag "und" may be used to specify a resource that will match any language in the absence of a better match. It can be considered similar to the BCP-47 language range "*" or "*-<script>". Here's an example.
- Language list: "en-US", "zh-Hans-CN".
- Resources: "zh-Hans-CN", "und".
- Resource with the higher score: "und".
- Description: The comparison starts with "en-US" but does not find a match based on "en" (partial or better). Since there is a resource tagged with "und", the matching algorithm uses that.
The tag "und" enables multiple languages to share a single resource and permits individual languages to be treated as exceptions. For example.
- Language list: "zh-Hans-CN", "en-US".
- Resources: "zh-Hans-CN", "und".
- Resource with the higher score: "zh-Hans-CN".
- Description: The comparison finds an exact match for the first item and so it doesn't check for the resource labeled "und".
You can use "und" with a script tag to filter resources by script. For example.
- Language list: "ru".
- Resources: "und-Latn", "und-Cyrl", "und-Arab".
- Resource with the higher score: "und-Cyrl".
- Description: The comparison doesn't find a match for "ru" (partial or better), and so matches the language tag "und". The suppress-script value "Cyrl" associated with the language tag "ru" matches the resource "und-Cyrl".
## Orthographic regional affinity
When two language tags with region subtag differences are matched, particular pairs of regions may have higher affinity to one another than to others. The only supported affined groups are for English ("en"). The region subtags "PH" (Philippines) and "LR" (Liberia) have orthographic affinity with the "US" region subtag. All other region subtags are affined with the "GB" (United Kingdom) region subtag. Therefore, when both "en-US" and "en-GB" resources are available, a language list of "en-HK" (English (Hong Kong SAR)) will get a higher score with "en-GB" resources than with "en-US" resources.
## Handling languages with many regional variants
Certain languages have large speaker communities in different regions that use different varieties of that language—languages such as English, French and Spanish, which are among those most often supported in multilingual apps. Regional differences can include differences in orthography (for instance, "color" versus "colour"), or dialect differences such as vocabulary (for instance, "truck" versus "lorry").
These languages with significant regional variants present certain challenges when making a world-ready app: "How many different regional variants should be supported?" "Which ones?" "What's the most cost-effective way to manage these regional variant assets for my app?" It's beyond the scope of this topic to answer all these questions. However, the language matching mechanisms in Windows do provide capabilities that can help you in handling regional variants.
Apps will often support only a single variety of any given language. Suppose an app has resources for just one variety of English that are expected to be used by English speakers regardless of what region they are from. In this case, the tag "en" without any region subtag would reflect that expectation. But apps might have historically used a tag such as "en-US" that includes a region subtag. In this case, that will also work: the app uses only one variety of English, and Windows handles matching a resource tagged for one regional variant with a user language preference for a different regional variant in an appropriate way.
If two or more regional varieties are going to be supported, however, a difference such as "en" versus "en-US" can have a significant impact on the user experience, and it becomes important to consider what region subtags to use.
Suppose you want to provide separate French localizations for French as used in Canada versus European French. For Canadian French, "fr-CA" can be used. For speakers from Europe, the localization will use French (France), and so "fr-FR" can be used for that. But what if a given user is from Belgium, with a language preference of "fr-BE"? Which resources will they get? The region "BE" is different from both "FR" and "CA", suggesting an "any region" match for both. However, France happens to be the preferred region for French, and so "fr-FR" will be considered the best match in this case.
Suppose you had first localized your app for only one variety of French, using French (France) strings but qualifying them generically as "fr", and then you want to add support for Canadian French. Probably only certain resources need to be re-translated for Canadian French. You can continue to use all the original assets keeping them qualified as "fr", and just add the small set of new assets using "fr-CA". If the user language preference is "fr-CA", then the "fr-CA" asset will have a higher matching score than the "fr" asset. But if the user language preference is for any other variety of French, then the region-neutral asset "fr" will be a better match than the "fr-CA" asset.
As another example, suppose you want to provide separate Spanish localizations for speakers from Spain versus speakers from Latin America. Suppose further that the translations for Latin America were provided by a vendor in Mexico. Should you use "es-ES" (Spain) and "es-MX" (Mexico) for two sets of resources? If you did, that could create problems for speakers from other Latin American regions such as Argentina or Colombia, since they would get the "es-ES" resources. In this case, there is a better alternative: you can use a macro region subtag, "es-419", to reflect that you intend the assets to be used for speakers from any part of Latin America or the Caribbean.
Region-neutral language tags and macro region subtags can be very effective if you want to support several regional varieties. To minimize the number of separate assets you need, you can qualify a given asset in a way that reflects the broadest coverage for which it is applicable. Then supplement a broadly-applicable asset with a more specific variant as needed. An asset with a region-neutral language qualifier will be used for users of any regional variety unless there is another asset with a more regionally-specific qualifier that applies to that user. For example, an "en" asset will match for an Australian English user, but an asset with "en-053" (English as used in Australia or New Zealand) will be a better match for that user, while an asset with "en-AU" will be the best possible match.
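As an illustration of this layering (the folder-based naming below is only an assumed example of how such assets might be qualified, not something prescribed by this topic), English resources could be organized so that broader assets act as fallbacks for more specific ones:

```
Strings/en/Resources.resw      (region-neutral English: the broad fallback)
Strings/en-053/Resources.resw  (overrides for Australia / New Zealand)
Strings/en-AU/Resources.resw   (overrides specific to Australian English)
```

For a user whose language is "en-AU", the "en-AU" asset is the best match where it exists, then "en-053", and finally the region-neutral "en" asset.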
English needs special consideration. If an app adds localization for two English varieties, those will likely be for US English and for UK, or "international", English. As noted above, certain regions outside the US follow United States spelling conventions, and Windows language matching takes that into consideration. In this scenario, it is not recommended to use the region-neutral tag "en" for one of the variants; instead, use "en-GB" and "en-US". (If a given resource doesn’t require separate variants, however, "en" can be used.) If either "en-GB" or "en-US" is replaced by "en", then that will interfere with the orthographic regional affinity provided by Windows. If a third English localization is added, then use a specific or macro region subtag for the additional variants as needed (for example, "en-CA", "en-AU" or "en-053"), but continue to use "en-GB" and "en-US".
## Related topics
* [How the Resource Management System matches and chooses resources](how-rms-matches-and-chooses-resources.md)
* [BCP-47](https://tools.ietf.org/html/bcp47)
* [Understand user profile languages and app manifest languages](../design/globalizing/manage-language-and-region.md)
* [Composition of macro geographic (continental) regions, geographical sub-regions, and selected economic and other groupings](https://unstats.un.org/unsd/methods/m49/m49regin.htm)
Network Working Group L. Barbato
Request for Comments: 5215 Xiph
Category: Standards Track August 2008
RTP Payload Format for Vorbis Encoded Audio
Status of This Memo
This document specifies an Internet standards track protocol for the
Internet community, and requests discussion and suggestions for
improvements. Please refer to the current edition of the "Internet
Official Protocol Standards" (STD 1) for the standardization state
and status of this protocol. Distribution of this memo is unlimited.
Abstract
This document describes an RTP payload format for transporting Vorbis
encoded audio. It details the RTP encapsulation mechanism for raw
Vorbis data and the delivery mechanisms for the decoder probability
model (referred to as a codebook), as well as other setup
information.
Also included within this memo are media type registrations and the
details necessary for the use of Vorbis with the Session Description
Protocol (SDP).
Table of Contents
1.  Introduction
    1.1.  Conformance and Document Conventions
2.  Payload Format
    2.1.  RTP Header
    2.2.  Payload Header
    2.3.  Payload Data
    2.4.  Example RTP Packet
3.  Configuration Headers
    3.1.  In-band Header Transmission
          3.1.1.  Packed Configuration
    3.2.  Out of Band Transmission
          3.2.1.  Packed Headers
    3.3.  Loss of Configuration Headers
4.  Comment Headers
5.  Frame Packetization
    5.1.  Example Fragmented Vorbis Packet
    5.2.  Packet Loss
6.  IANA Considerations
    6.1.  Packed Headers IANA Considerations
7.  SDP Related Considerations
    7.1.  Mapping Media Type Parameters into SDP
          7.1.1.  SDP Example
    7.2.  Usage with the SDP Offer/Answer Model
8.  Congestion Control
9.  Example
    9.1.  Stream Radio
10. Security Considerations
11. Copying Conditions
12. Acknowledgments
13. References
    13.1. Normative References
    13.2. Informative References
1. Introduction
Vorbis is a general purpose perceptual audio codec intended to allow
maximum encoder flexibility, thus allowing it to scale competitively
over an exceptionally wide range of bit rates. At the high quality/
bitrate end of the scale (CD or DAT rate stereo, 16/24 bits), it is
in the same league as MPEG-4 AAC. Vorbis is also intended for lower
and higher sample rates (from 8kHz telephony to 192kHz digital
masters) and a range of channel representations (monaural,
polyphonic, stereo, quadraphonic, 5.1, ambisonic, or up to 255
discrete channels).
Vorbis encoded audio is generally encapsulated within an Ogg format
bitstream [RFC3533], which provides framing and synchronization. For
the purposes of RTP transport, this layer is unnecessary, and so raw
Vorbis packets are used in the payload.
1.1. Conformance and Document Conventions
The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
"SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
document are to be interpreted as described in BCP 14, [RFC2119] and
indicate requirement levels for compliant implementations.
Requirements apply to all implementations unless otherwise stated.
An implementation is a software module that supports one of the media
types defined in this document. Software modules may support
multiple media types, but conformance is considered individually for
each type.
Implementations that fail to satisfy one or more "MUST" requirements
are considered non-compliant. Implementations that satisfy all
"MUST" requirements, but fail to satisfy one or more "SHOULD"
requirements, are said to be "conditionally compliant". All other
implementations are "unconditionally compliant".
2. Payload Format
For RTP-based transport of Vorbis-encoded audio, the standard RTP
header is followed by a 4-octet payload header, and then the payload
data. The payload headers are used to associate the Vorbis data with
its associated decoding codebooks as well as indicate if the
following packet contains fragmented Vorbis data and/or the number of
whole Vorbis data frames. The payload data contains the raw Vorbis
bitstream information. There are 3 types of Vorbis data; an RTP
payload MUST contain just one of them at a time.
2.1. RTP Header
The format of the RTP header is specified in [RFC3550] and shown in
Figure 1. This payload format uses the fields of the header in a
manner consistent with that specification.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | sequence number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| timestamp |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 1: RTP Header
The RTP header begins with an octet of fields (V, P, X, and CC) to
support specialized RTP uses (see [RFC3550] and [RFC3551] for
details). For Vorbis RTP, the following values are used.
Version (V): 2 bits
This field identifies the version of RTP. The version used by this
specification is two (2).
Padding (P): 1 bit
Padding MAY be used with this payload format according to Section 5.1
of [RFC3550].
Extension (X): 1 bit
The Extension bit is used in accordance with [RFC3550].
CSRC count (CC): 4 bits
The CSRC count is used in accordance with [RFC3550].
Marker (M): 1 bit
Set to zero. Audio silence suppression is not used. This conforms
to Section 4.1 of [VORBIS-SPEC-REF].
Payload Type (PT): 7 bits
An RTP profile for a class of applications is expected to assign a
payload type for this format, or a dynamically allocated payload type
SHOULD be chosen that designates the payload as Vorbis.
Sequence number: 16 bits
The sequence number increments by one for each RTP data packet sent,
and may be used by the receiver to detect packet loss and to restore
the packet sequence. This field is detailed further in [RFC3550].
Timestamp: 32 bits
A timestamp representing the sampling time of the first sample of the
first Vorbis packet in the RTP payload. The clock frequency MUST be
set to the sample rate of the encoded audio data and is conveyed out-
of-band (e.g., as an SDP parameter).
SSRC/CSRC identifiers:
These two fields, 32 bits each with one SSRC field and a maximum of
16 CSRC fields, are as defined in [RFC3550].
2.2. Payload Header
The 4 octets following the RTP Header section are the Payload Header.
This header is split into a number of bit fields detailing the format
of the following payload data packets.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | F |VDT|# pkts.|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 2: Payload Header
Ident: 24 bits
This 24-bit field is used to associate the Vorbis data to a decoding
Configuration. It is stored as a network byte order integer.
Fragment type (F): 2 bits
This field is set according to the following list:
0 = Not Fragmented
1 = Start Fragment
2 = Continuation Fragment
3 = End Fragment
Vorbis Data Type (VDT): 2 bits
This field specifies the kind of Vorbis data stored in this RTP
packet. There are currently three different types of Vorbis
payloads. Each packet MUST contain only a single type of Vorbis
packet (e.g., you must not aggregate configuration and comment
packets in the same RTP payload).
0 = Raw Vorbis payload
1 = Vorbis Packed Configuration payload
2 = Legacy Vorbis Comment payload
3 = Reserved
The packets with a VDT of value 3 MUST be ignored.
The last 4 bits represent the number of complete packets in this
payload. This provides for a maximum number of 15 Vorbis packets in
the payload. If the payload contains fragmented data, the number of
packets MUST be set to 0.
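The following C++ fragment is an informative sketch (not part of this
specification) of how a receiver might unpack the fields of the
payload header described above:

   #include <cstdint>

   struct VorbisPayloadHeader {
       uint32_t ident;     // 24-bit configuration identifier
       uint8_t  fragType;  // F: 0 none, 1 start, 2 continuation, 3 end
       uint8_t  dataType;  // VDT: 0 raw, 1 configuration, 2 comment
       uint8_t  numPkts;   // complete packets in payload (0 if fragmented)
   };

   // 'p' points at the first of the 4 payload header octets.
   VorbisPayloadHeader ParsePayloadHeader(const uint8_t* p) {
       VorbisPayloadHeader h;
       h.ident    = (uint32_t(p[0]) << 16) | (uint32_t(p[1]) << 8) | p[2];
       h.fragType = (p[3] >> 6) & 0x03;
       h.dataType = (p[3] >> 4) & 0x03;
       h.numPkts  =  p[3]       & 0x0F;
       return h;
   }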
2.3. Payload Data
Raw Vorbis packets are currently unbounded in length; application
profiles will likely define a practical limit. Typical Vorbis packet
sizes range from very small (2-3 bytes) to quite large (8-12
kilobytes). The reference implementation [LIBVORBIS] typically
produces packets less than ~800 bytes, except for the setup header
packets, which are ~4-12 kilobytes. Within an RTP context, to avoid
fragmentation, the Vorbis data packet size SHOULD be kept
sufficiently small so that after adding the RTP and payload headers,
the complete RTP packet is smaller than the path MTU.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | vorbis packet data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 3: Payload Data Header
Each Vorbis payload packet starts with a two octet length header,
which is used to represent the size in bytes of the following data
payload, and is followed by the raw Vorbis data padded to the nearest
byte boundary, as explained by the Vorbis I Specification
[VORBIS-SPEC-REF]. The length value is stored as a network byte
order integer.
For payloads that consist of multiple Vorbis packets, the payload
data consists of the packet length followed by the packet data for
each of the Vorbis packets in the payload.
The Vorbis packet length header is the length of the Vorbis data
block only and does not include the length field.
The payload packing of the Vorbis data packets MUST follow the
guidelines set out in [RFC3551], where the oldest Vorbis packet
occurs immediately after the RTP packet header. Subsequent Vorbis
packets, if any, MUST follow in temporal order.
Audio channel mapping is in accordance with the Vorbis I
Specification [VORBIS-SPEC-REF].
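As an informative illustration (not normative), a receiver might walk
the payload data section as follows, using the packet count taken from
the payload header:

   #include <cstddef>
   #include <cstdint>
   #include <vector>

   // Splits the payload data section into individual raw Vorbis packets.
   // 'data'/'size' cover the bytes after the 4-octet payload header.
   std::vector<std::vector<uint8_t>> SplitVorbisPackets(const uint8_t* data,
                                                        size_t size,
                                                        unsigned numPkts) {
       std::vector<std::vector<uint8_t>> packets;
       size_t pos = 0;
       for (unsigned i = 0; i < numPkts && pos + 2 <= size; ++i) {
           size_t len = (size_t(data[pos]) << 8) | data[pos + 1]; // network order
           pos += 2;
           if (pos + len > size)
               break;                      // malformed: length exceeds payload
           packets.emplace_back(data + pos, data + pos + len);
           pos += len;
       }
       return packets;
   }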
2.4. Example RTP Packet
Here is an example RTP payload containing two Vorbis packets.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| 2 |0|0| 0 |0| PT | sequence number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| timestamp (in sample rate units) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronisation source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | 0 | 0 | 2 pks |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | vorbis data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. vorbis data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | next vorbis packet data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. vorbis data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. vorbis data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 4: Example Raw Vorbis Packet
The payload data section of the RTP packet begins with the 24-bit
Ident field followed by the one octet bit field header, which has the
number of Vorbis frames set to 2. Each of the Vorbis data frames is
prefixed by the two octets length field. The Packet Type and
Fragment Type are set to 0. The Configuration that will be used to
decode the packets is the one indexed by the ident value.
3. Configuration Headers
Unlike other mainstream audio codecs, Vorbis has no statically
configured probability model. Instead, it packs all entropy decoding
configuration, Vector Quantization and Huffman models into a data
block that must be transmitted to the decoder with the compressed
data. A decoder also requires information detailing the number of
audio channels, bitrates, and similar information to configure itself
for a particular compressed data stream. These two blocks of
information are often referred to collectively as the "codebooks" for
a Vorbis stream, and are included as special "header" packets at the
start of the compressed data. In addition, the Vorbis I
specification [VORBIS-SPEC-REF] requires the presence of a comment
header packet that gives simple metadata about the stream, but this
information is not required for decoding the frame sequence.
Thus, these two codebook header packets must be received by the
decoder before any audio data can be interpreted. These requirements
pose problems in RTP, which is often used over unreliable transports.
Since this information must be transmitted reliably and, as the RTP
stream may change certain configuration data mid-session, there are
different methods for delivering this configuration data to a client,
both in-band and out-of-band, which are detailed below. In order to
set up an initial state for the client application, the configuration
MUST be conveyed via the signalling channel used to set up the
session. One example of such signalling is SDP [RFC4566] with the
Offer/Answer Model [RFC3264]. Changes to the configuration MAY be
communicated via a re-invite, conveying a new SDP, or sent in-band in
the RTP channel. Implementations MUST support an in-band delivery of
updated codebooks, and SHOULD support out-of-band codebook update
using a new SDP file. The changes may be due to different codebooks
as well as different bitrates of the RTP stream.
For non-chained streams, the recommended Configuration delivery
method is to place the Packed Configuration (Section 3.1.1) inside the
SDP, as explained in Mapping Media Type Parameters into SDP
(Section 7.1).
The 24-bit Ident field is used to map which Configuration will be
used to decode a packet. When the Ident field changes, it indicates
that a change in the stream has taken place. The client application
MUST have in advance the correct configuration. If the client
detects a change in the Ident value and does not have this
information, it MUST NOT decode the raw associated Vorbis data until
it fetches the correct Configuration.
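An informative sketch of this behavior (not part of this
specification): a receiver can keep a table of configurations keyed by
Ident and refuse to decode raw Vorbis data whose Ident is not yet
known.

   #include <cstdint>
   #include <map>
   #include <vector>

   struct VorbisConfiguration {
       std::vector<uint8_t> identification;  // Identification header
       std::vector<uint8_t> setup;           // Setup header
   };

   class ConfigurationCache {
   public:
       void Store(uint32_t ident, VorbisConfiguration cfg) {
           table_[ident] = std::move(cfg);
       }

       // Returns nullptr when the Ident is unknown; in that case the raw
       // Vorbis data MUST NOT be decoded until the configuration arrives.
       const VorbisConfiguration* Find(uint32_t ident) const {
           auto it = table_.find(ident);
           return it == table_.end() ? nullptr : &it->second;
       }

   private:
       std::map<uint32_t, VorbisConfiguration> table_;
   };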
3.1. In-band Header Transmission
The Packed Configuration (Section 3.1.1) Payload is sent in-band with
the packet type bits set to match the Vorbis Data Type. Clients MUST
be capable of dealing with fragmentation and periodic re-transmission
of [RFC4588] the configuration headers. The RTP timestamp value MUST
reflect the transmission time of the first data packet for which this
configuration applies.
3.1.1. Packed Configuration
A Vorbis Packed Configuration is indicated with the Vorbis Data Type
field set to 1. Of the three headers defined in the Vorbis I
specification [VORBIS-SPEC-REF], the Identification and the Setup
MUST be packed as they are, while the Comment header MAY be replaced
with a dummy one.
The packed configuration stores Xiph codec configurations in a
generic way: the first field stores the number of the following
packets minus one (count field), the next ones represent the size of
the headers (length fields), and the headers immediately follow the
list of length fields. The size of the last header is implicit.
The count and the length fields are encoded using the following
logic: the data is in network byte order; every byte has the most
significant bit used as a flag, and the following 7 bits are used to
store the value. The first 7 most significant bits are stored in the
first byte. If there are remaining bits, the flag bit is set to 1
and the subsequent 7 bits are stored in the following byte. If there
are remaining bits, set the flag to 1 and the same procedure is
repeated. The ending byte has the flag bit set to 0. To decode,
simply iterate over the bytes until the flag bit is set to 0. For
every byte, the data is added to the accumulated value multiplied by
128.
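An informative C++ sketch of the decoding side of this length encoding
(not part of this specification):

   #include <cstddef>
   #include <cstdint>

   // Decodes one flagged, 7-bits-per-byte length field starting at
   // data[pos] and advances pos past it.
   uint32_t DecodePackedLength(const uint8_t* data, size_t size, size_t& pos) {
       uint32_t value = 0;
       while (pos < size) {
           uint8_t byte = data[pos++];
           value = value * 128 + (byte & 0x7F); // accumulate the low 7 bits
           if ((byte & 0x80) == 0)              // flag bit 0 marks the last byte
               break;
       }
       return value;
   }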
The headers are packed in the same order as they are present in Ogg
[VORBIS-SPEC-REF]: Identification, Comment, Setup.
The 2 byte length tag defines the length of the packed headers as the
sum of the Configuration, Comment, and Setup lengths.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | xxxx |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| xxxxx |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | 0 | 1 | 1|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | n. of headers | length1 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length2 | Identification ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Identification ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Identification ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Identification ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Identification | Comment ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment | Setup ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Setup ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Setup ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 5: Packed Configuration Figure
The Ident field is set with the value that will be used by the Raw
Payload Packets to address this Configuration. The Fragment type is
set to 0 because the packet bears the full Packed configuration. The
number of the packet is set to 1.
3.2. Out of Band Transmission
The following packet definition MUST be used when the Configuration is
delivered inside the SDP.
3.2.1. Packed Headers
As mentioned above, the RECOMMENDED delivery vector for Vorbis
configuration data is via a retrieval method that can be performed
using a reliable transport protocol. As the RTP headers are not
required for this method of delivery, the structure of the
configuration data is slightly different. The packed header starts
with a 32-bit (network-byte ordered) count field, which details the
number of packed headers that are contained in the bundle. The
following shows the Packed header payload for each chained Vorbis
stream.
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Number of packed headers |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Packed header |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Packed header |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 6: Packed Headers Overview
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | length ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. | n. of headers | length1 | length2 ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. | Identification Header ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.................................................................
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. | Comment Header ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.................................................................
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment Header |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Setup Header ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.................................................................
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Setup Header |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 7: Packed Headers Detail
The key difference between the in-band format and this one is that
there is no need for the payload header octet. In this figure, the
comment has a size bigger than 127 bytes.
3.3. Loss of Configuration Headers
Unlike the loss of raw Vorbis payload data, loss of a configuration
header leads to a situation where it will not be possible to
successfully decode the stream. Implementations MAY try to recover
from an error by requesting again the missing Configuration or, if
the delivery method is in-band, by buffering the payloads waiting for
the Configuration needed to decode them. The baseline reaction
SHOULD be to either reset or end the RTP session.
4. Comment Headers
A Vorbis Data Type value of 2 indicates that the packet contains the
comment metadata, such as artist name, track title, and so on. These
metadata messages are not intended to be fully descriptive but rather
to offer basic track/song information. Clients MAY ignore it
completely. The details on the format of the comments can be found
in the Vorbis I Specification [VORBIS-SPEC-REF].
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | xxxx |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| xxxxx |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | 0 | 2 | 1|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | Comment ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. Comment |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 8: Comment Packet
The 2-byte length field is necessary since this packet could be
fragmented.
5. Frame Packetization
Each RTP payload contains either one Vorbis packet fragment or an
integer number of complete Vorbis packets (up to a maximum of 15
packets, since the number of packets is defined by a 4-bit value).
Any Vorbis data packet that is less than path MTU SHOULD be bundled
in the RTP payload with as many Vorbis packets as will fit, up to a
maximum of 15, except when such bundling would exceed an
application's desired transmission latency. Path MTU is detailed in
[RFC1191] and [RFC1981].
A fragmented packet has a zero in the last four bits of the payload
header. The first fragment will set the Fragment type to 1. Each
fragment after the first will set the Fragment type to 2 in the
payload header. The consecutive fragments MUST be sent without any
other payload being sent between the first and the last fragment.
The RTP payload containing the last fragment of the Vorbis packet
will have the Fragment type set to 3. To maintain the correct
sequence for fragmented packet reception, the timestamp field of
fragmented packets MUST be the same as the first packet sent, with
the sequence number incremented as normal for the subsequent RTP
payloads; this will affect the RTCP jitter measurement. The length
field shows the fragment length.
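The following informative C++ sketch (not normative) outlines fragment
reassembly according to these rules. For brevity it simply drops an
in-progress packet when a gap in the sequence numbers is detected; the
more detailed loss handling of Section 5.2 is omitted.

   #include <cstdint>
   #include <vector>

   class FragmentAssembler {
   public:
       // Feed fragmented payloads in arrival order (fragType 1, 2, or 3).
       // Returns true when 'out' holds a complete reassembled packet.
       bool Push(uint16_t seq, uint8_t fragType,
                 const std::vector<uint8_t>& fragment,
                 std::vector<uint8_t>& out) {
           if (fragType == 1) {                        // start fragment
               buffer_ = fragment;
               expectedSeq_ = uint16_t(seq + 1);
               assembling_ = true;
               return false;
           }
           if (!assembling_ || seq != expectedSeq_) {  // loss or stray fragment
               assembling_ = false;
               return false;
           }
           buffer_.insert(buffer_.end(), fragment.begin(), fragment.end());
           expectedSeq_ = uint16_t(seq + 1);
           if (fragType == 3) {                        // end fragment
               out = buffer_;
               assembling_ = false;
               return true;
           }
           return false;                               // continuation (type 2)
       }

   private:
       std::vector<uint8_t> buffer_;
       uint16_t expectedSeq_ = 0;
       bool assembling_ = false;
   };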
5.1. Example Fragmented Vorbis Packet
Here is an example of a fragmented Vorbis packet split over three RTP
payloads. Each of them contains the standard RTP headers as well as
the 4-octet Vorbis headers.
Packet 1:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | 1000 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| 12345 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | 1 | 0 | 0|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | vorbis data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. vorbis data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 9: Example Fragmented Packet (Packet 1)
In this payload, the initial sequence number is 1000 and the
timestamp is 12345. The Fragment type is set to 1, the number of
packets field is set to 0, and as the payload is raw Vorbis data, the
VDT field is set to 0.
Packet 2:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | 1001 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| 12345 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | 2 | 0 | 0|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | vorbis data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. vorbis data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 10: Example Fragmented Packet (Packet 2)
The Fragment type field is set to 2, and the number of packets field
is set to 0. For large Vorbis fragments, there can be several of
these types of payloads. The maximum packet size SHOULD be no
greater than the path MTU, including all RTP and payload headers.
The sequence number has been incremented by one, but the timestamp
field remains the same as the initial payload.
Packet 3:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
|V=2|P|X| CC |M| PT | 1002 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| 12345 |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| synchronization source (SSRC) identifier |
+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
| contributing source (CSRC) identifiers |
| ... |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Ident | 3 | 0 | 0|
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| length | vorbis data ..
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
.. vorbis data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Figure 11: Example Fragmented Packet (Packet 3)
This is the last Vorbis fragment payload. The Fragment type is set
to 3 and the packet count remains set to 0. As in the previous
payloads, the timestamp remains set to the first payload timestamp in
the sequence and the sequence number has been incremented.
5.2. Packet Loss
As there is no error correction within the Vorbis stream, packet loss
will result in a loss of signal. Packet loss is more of an issue for
fragmented Vorbis packets as the client will have to cope with the
handling of the Fragment Type. In case of loss of fragments, the
client MUST discard all the remaining Vorbis fragments and decode the
incomplete packet. If we use the fragmented Vorbis packet example
above and the first RTP payload is lost, the client MUST detect that
the next RTP payload has the packet count field set to 0 and the
Fragment type 2 and MUST drop it. The next RTP payload, which is the
final fragmented packet, MUST be dropped in the same manner. If the
missing RTP payload is the last, the two fragments received will be
kept and the incomplete Vorbis packet decoded.
Loss of any of the Configuration fragments will result in the loss of
the full Configuration packet with the result detailed in the Loss of
Configuration Headers (Section 3.3) section.
6. IANA Considerations
Type name: audio
Subtype name: vorbis
Required parameters:
rate: indicates the RTP timestamp clock rate as described in RTP
Profile for Audio and Video Conferences with Minimal Control
[RFC3551].
channels: indicates the number of audio channels as described in
RTP Profile for Audio and Video Conferences with Minimal
Control [RFC3551].
configuration: the base64 [RFC4648] representation of the Packed
Headers (Section 3.2.1).
Encoding considerations:
This media type is framed and contains binary data.
Security considerations:
See Section 10 of RFC 5215.
Interoperability considerations:
None
Published specification:
RFC 5215
Ogg Vorbis I specification: Codec setup and packet decode.
Available from the Xiph website, http://xiph.org/
Applications which use this media type:
Audio streaming and conferencing tools
Additional information:
None
Person & email address to contact for further information:
Luca Barbato: <[email protected]>
IETF Audio/Video Transport Working Group
Intended usage:
COMMON
Restriction on usage:
This media type depends on RTP framing, hence is only defined for
transfer via RTP [RFC3550].
Author:
Luca Barbato
Change controller:
IETF AVT Working Group delegated from the IESG
6.1. Packed Headers IANA Considerations
The following IANA considerations refer to the split configuration
Packed Headers (Section 3.2.1) used within RFC 5215.
Type name: audio
Subtype name: vorbis-config
Required parameters:
None
Optional parameters:
None
Encoding considerations:
This media type contains binary data.
Security considerations:
See Section 10 of RFC 5215.
Interoperability considerations:
None
Published specification:
RFC 5215
Applications which use this media type:
Vorbis encoded audio, configuration data
Additional information:
None
Person & email address to contact for further information:
Luca Barbato: <[email protected]>
IETF Audio/Video Transport Working Group
Intended usage: COMMON
Restriction on usage:
This media type doesn't depend on the transport.
Author:
Luca Barbato
Change controller:
IETF AVT Working Group delegated from the IESG
7. SDP Related Considerations
The following paragraphs define the mapping of the parameters
described in the IANA considerations section and their usage in the
Offer/Answer Model [RFC3264]. In order to be forward compatible, the
implementation MUST ignore unknown parameters.
7.1. Mapping Media Type Parameters into SDP
The information carried in the Media Type specification has a
specific mapping to fields in the Session Description Protocol (SDP)
[RFC4566], which is commonly used to describe RTP sessions. When SDP
is used to specify sessions, the mappings are as follows:
o The type name ("audio") goes in SDP "m=" as the media name.
o The subtype name ("vorbis") goes in SDP "a=rtpmap" as the encoding
name.
o The parameter "rate" also goes in "a=rtpmap" as the clock rate.
o The parameter "channels" also goes in "a=rtpmap" as the channel
count.
o The mandated parameter "configuration" MUST be included in the
SDP "a=fmtp" attribute.
If the stream comprises chained Vorbis files and all of them are
known in advance, the Configuration Packet for each file SHOULD be
passed to the client using the configuration attribute.
The port value is specified by the server application bound to the
address specified in the c= line. The channel count value specified
in the rtpmap attribute SHOULD match the current Vorbis stream or
should be considered the maximum number of channels to be expected.
The timestamp clock rate MUST be a multiple of the sample rate; a
different payload number MUST be used if the clock rate changes. The
Configuration payload delivers the exact information, thus the SDP
information SHOULD be considered a hint. An example is found below.
7.1.1. SDP Example
The following example shows a basic SDP single stream. The first
configuration packet is inside the SDP; other configurations could be
fetched at any time from the URIs provided. The following base64
[RFC4648] configuration string is folded in this example due to RFC
line length limitations.
c=IN IP4 192.0.2.1
m=audio RTP/AVP 98
a=rtpmap:98 vorbis/44100/2
a=fmtp:98 configuration=AAAAAZ2f4g9NAh4aAXZvcmJpcwA...;
Note that the payload format (encoding) names are commonly shown in
uppercase. Media Type subtypes are commonly shown in lowercase.
These names are case-insensitive in both places. Similarly,
parameter names are case-insensitive both in Media Type types and in
the default mapping to the SDP a=fmtp attribute. The a=fmtp line is
a single line, even if it is shown as multiple lines in this document
for clarity.
7.2. Usage with the SDP Offer/Answer Model
There are no negotiable parameters. All of them are declarative.
8. Congestion Control
The general congestion control considerations for transporting RTP
data apply to Vorbis audio over RTP as well. See the RTP
specification [RFC3550] and any applicable RTP profile (e.g.,
[RFC3551]). Audio data can be encoded using a range of different bit
rates, so it is possible to adapt network bandwidth by adjusting the
encoder bit rate in real time or by having multiple copies of content
encoded at different bit rates.
9. Example
The following example shows a common usage pattern that MAY be applied
in a situation such as the one described below. The main purpose of
this section is to better explain the use of the transmission vectors.
9.1. Stream Radio
This is one of the most common situations: there is one single server
streaming content in multicast, and the clients may start a session
at a random time. The content itself could be a mix of a live stream
(such as the webjockey's voice) and stored streams (such as the music
she plays).
In this situation, we don't know in advance how many codebooks we
will use. The clients can join anytime and users expect to start
listening to the content in a short time.
Upon joining, the client will receive the current Configuration
necessary to decode the current stream inside the SDP so that the
decoding will start immediately after.
When the streamed content changes, the new Configuration is sent in-
band before the actual stream, and the Configuration that has to be
sent inside the SDP is updated. Since the in-band method is
unreliable, an out-of-band fallback is provided.
The client may choose to fetch the Configuration from the alternate
source as soon as it discovers a Configuration packet got lost in-
band, or use selective retransmission [RFC3611] if the server
supports this feature.
A server-side optimization would be to keep a hash list of the
Configurations per session, which avoids packing all of them and
sending the same Configuration with different Ident tags.
A client-side optimization would be to keep a tag list of the
Configurations per session and not process configuration packets that
are already known.
10. Security Considerations
RTP packets using this payload format are subject to the security
considerations discussed in the RTP specification [RFC3550], the
base64 specification [RFC4648], and the URI Generic syntax
specification [RFC3986]. Among other considerations, this implies
that the confidentiality of the media stream is achieved by using
encryption. Because the data compression used with this payload
format is applied end-to-end, encryption may be performed on the
compressed data.
11. Copying Conditions
The authors agree to grant third parties the irrevocable right to
copy, use, and distribute the work, with or without modification, in
any medium, without royalty, provided that, unless separate
permission is granted, redistributed modified works do not contain
misleading author, version, name of work, or endorsement information.
12. Acknowledgments
This document is a continuation of the following documents:
Moffitt, J., "RTP Payload Format for Vorbis Encoded Audio", February
2001.
Kerr, R., "RTP Payload Format for Vorbis Encoded Audio", December
2004.
The Media Type declaration is a continuation of the following
document:
Short, B., "The audio/rtp-vorbis MIME Type", January 2008.
Thanks to the AVT, Vorbis Communities / Xiph.Org Foundation including
Steve Casner, Aaron Colwell, Ross Finlayson, Fluendo, Ramon Garcia,
Pascal Hennequin, Ralph Giles, Tor-Einar Jarnbjo, Colin Law, John
Lazzaro, Jack Moffitt, Christopher Montgomery, Colin Perkins, Barry
Short, Mike Smith, Phil Kerr, Michael Sparks, Magnus Westerlund,
David Barrett, Silvia Pfeiffer, Stefan Ehmann, Gianni Ceccarelli, and
Alessandro Salvatori. Thanks to the LScube Group, in particular
Federico Ridolfo, Francesco Varano, Giampaolo Mancini, Dario
Gallucci, and Juan Carlos De Martin.
13. References
13.1. Normative References
[RFC1191] Mogul, J. and S. Deering, "Path MTU discovery",
RFC 1191, November 1990.
[RFC1981] McCann, J., Deering, S., and J. Mogul, "Path MTU
Discovery for IP version 6", RFC 1981,
August 1996.
[RFC2119] Bradner, S., "Key words for use in RFCs to
Indicate Requirement Levels", BCP 14, RFC 2119,
March 1997.
[RFC3264] Rosenberg, J. and H. Schulzrinne, "An Offer/Answer
Model with Session Description Protocol (SDP)",
RFC 3264, June 2002.
[RFC3550] Schulzrinne, H., Casner, S., Frederick, R., and V.
Jacobson, "RTP: A Transport Protocol for Real-Time
Applications", STD 64, RFC 3550, July 2003.
[RFC3551] Schulzrinne, H. and S. Casner, "RTP Profile for
Audio and Video Conferences with Minimal Control",
STD 65, RFC 3551, July 2003.
[RFC3986] Berners-Lee, T., Fielding, R., and L. Masinter,
"Uniform Resource Identifier (URI): Generic
Syntax", STD 66, RFC 3986, January 2005.
[RFC4566] Handley, M., Jacobson, V., and C. Perkins, "SDP:
Session Description Protocol", RFC 4566,
July 2006.
[RFC4648] Josefsson, S., "The Base16, Base32, and Base64
Data Encodings", RFC 4648, October 2006.
[VORBIS-SPEC-REF] "Ogg Vorbis I specification: Codec setup and
packet decode. Available from the Xiph website,
http://xiph.org/vorbis/doc/Vorbis_I_spec.html".
13.2. Informative References
[LIBVORBIS] "libvorbis: Available from the dedicated website,
http://vorbis.com/".
[RFC3533] Pfeiffer, S., "The Ogg Encapsulation Format
Version 0", RFC 3533, May 2003.
[RFC3611] Friedman, T., Caceres, R., and A. Clark, "RTP
Control Protocol Extended Reports (RTCP XR)",
RFC 3611, November 2003.
[RFC4588] Rey, J., Leon, D., Miyazaki, A., Varsa, V., and R.
Hakenberg, "RTP Retransmission Payload Format",
RFC 4588, July 2006.
Author's Address
Luca Barbato
Xiph.Org Foundation
EMail: [email protected]
URI: http://xiph.org/
Full Copyright Statement
Copyright (C) The IETF Trust (2008).
This document is subject to the rights, licenses and restrictions
contained in BCP 78, and except as set forth therein, the authors
retain all their rights.
This document and the information contained herein are provided on an
"AS IS" basis and THE CONTRIBUTOR, THE ORGANIZATION HE/SHE REPRESENTS
OR IS SPONSORED BY (IF ANY), THE INTERNET SOCIETY, THE IETF TRUST AND
THE INTERNET ENGINEERING TASK FORCE DISCLAIM ALL WARRANTIES, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTY THAT THE USE OF
THE INFORMATION HEREIN WILL NOT INFRINGE ANY RIGHTS OR ANY IMPLIED
WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
Intellectual Property
The IETF takes no position regarding the validity or scope of any
Intellectual Property Rights or other rights that might be claimed to
pertain to the implementation or use of the technology described in
this document or the extent to which any license under such rights
might or might not be available; nor does it represent that it has
made any independent effort to identify any such rights. Information
on the procedures with respect to rights in RFC documents can be
found in BCP 78 and BCP 79.
Copies of IPR disclosures made to the IETF Secretariat and any
assurances of licenses to be made available, or the result of an
attempt made to obtain a general license or permission for the use of
such proprietary rights by implementers or users of this
specification can be obtained from the IETF on-line IPR repository at
http://www.ietf.org/ipr.
The IETF invites any interested party to bring to its attention any
copyrights, patents or patent applications, or other proprietary
rights that may cover technology that may be required to implement
this standard. Please address the information to the IETF at
[email protected].
{
// Members
"members" : [
{
"first_name":"Jane",
"last_name":"Roe",
"events_attended":10,
"accept_waiver_of_liability" : true
},
/* Missing
*left*
brace */
"first_name":"John",
"last_name":"Doe",
"events_attended":2,
"accept_waiver_of_liability" : true
}
]
}
#
# Copyright Soramitsu Co., Ltd. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
#
add_subdirectory(protobuf)
add_subdirectory(transaction_responses)
///////////////////////////////////////////////////////////////////////////////
/// \file functional.hpp
/// Proto callables for various things
//
// Copyright 2010 Eric Niebler. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_PROTO_FUNCTIONAL_HPP_EAN_11_27_2010
#define BOOST_PROTO_FUNCTIONAL_HPP_EAN_11_27_2010
#include <boost/proto/functional/std.hpp>
#include <boost/proto/functional/fusion.hpp>
#include <boost/proto/functional/range.hpp>
#endif
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('../../moment')) :
typeof define === 'function' && define.amd ? define(['../../moment'], factory) :
factory(global.moment)
}(this, function (moment) { 'use strict';
<?xml version="1.0" encoding="utf-8"?>
<!-- See fburl.com/140690840 for information about i18n on Android -->
<!-- @generated -->
<!-- FB Locale: he_IL -->
<resources exclude-from-buck-resource-map="true">
<string name="com_facebook_like_button_not_liked" gender="unknown">לייק</string>
<string name="com_facebook_like_button_liked" gender="unknown">סימנת בלייק</string>
<string name="com_facebook_loginview_log_out_button" gender="unknown">התנתק/י</string>
<string name="com_facebook_loginview_log_in_button" gender="unknown">התחבר</string>
<string name="com_facebook_loginview_log_in_button_long" gender="unknown">התחברות באמצעות פייסבוק</string>
<string name="com_facebook_loginview_log_in_button_continue" gender="unknown">המשך/המשיכי עם פייסבוק</string>
<string name="com_facebook_loginview_logged_in_as" gender="unknown">מחובר/ת בתור: %1$s</string>
<string name="com_facebook_loginview_logged_in_using_facebook" gender="unknown">מחובר/ת כמשתמש בפייסבוק</string>
<string name="com_facebook_loginview_log_out_action" gender="unknown">התנתק/י</string>
<string name="com_facebook_loginview_cancel_action" gender="unknown">ביטול</string>
<string name="com_facebook_loading" gender="unknown">טוען...</string>
<string name="com_facebook_internet_permission_error_title" gender="unknown">שגיאת AndroidManifest</string>
<string name="com_facebook_internet_permission_error_message" gender="unknown">התחברות WebView דורשת הרשאת אינטרנט</string>
<string name="com_facebook_tooltip_default" gender="unknown">את/ה בשליטה - בחר/י אילו פרטים ברצונך לשתף עם אפליקציות.</string>
<string name="com_facebook_image_download_unknown_error" gender="unknown">שגיאה לא צפויה במהלך הורדת תמונה.</string>
<string name="com_facebook_share_button_text" gender="unknown">שתף/שתפי</string>
<string name="com_facebook_send_button_text" gender="unknown">שלח/י</string>
<string name="com_facebook_device_auth_instructions" gender="unknown">יש לבקר בכתובת facebook.com/device</b&gt ולהזין את הקוד המוצג למעלה</string>
<string name="com_facebook_smart_device_instructions" gender="unknown">כדי לחבר את חשבונך, יש לפתוח את אפליקציית פייסבוק במכשיר הנייד ולבדוק אם יש התראות.</string>
<string name="com_facebook_smart_device_instructions_or" gender="unknown">- או -</string>
<string name="com_facebook_smart_login_confirmation_title" gender="unknown">אישור ההתחברות</string>
<string name="com_facebook_smart_login_confirmation_continue_as" gender="unknown">המשך/המשיכי בתור %1$s</string>
<string name="com_facebook_smart_login_confirmation_cancel" gender="unknown">לא את/ה?</string>
</resources>
|
{
"pile_set_name": "Github"
}
|
<Project DefaultTargets="TinyCLR_Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
<PropertyGroup>
<AssemblyName>Microsoft.SPOT.Hardware.SerialPort</AssemblyName>
<OutputType>Library</OutputType>
<RootNamespace>Microsoft.SPOT</RootNamespace>
<ProjectTypeGuids>{b69e3092-b931-443c-abe7-7e7b65f2a37f};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<ProductVersion>9.0.21022</ProductVersion>
<SchemaVersion>2.0</SchemaVersion>
<ProjectGuid>{81819D3C-CDC2-4286-AA7C-C80168746513}</ProjectGuid>
<NoWarn>$(NoWarn),0169</NoWarn>
<ComponentGuid>{f399a1c9-f698-437e-a436-2cf8b3eafaf5}</ComponentGuid>
<AssemblyBothEndian>true</AssemblyBothEndian>
</PropertyGroup>
<Import Project="$(SPOCLIENT)\tools\Targets\Microsoft.SPOT.CSharp.Targets" />
<PropertyGroup>
<!-- MMP_STUB options -->
<MMP_STUB_SKIP>false</MMP_STUB_SKIP>
<MMP_STUB_GenerateSkeletonFile>$(BUILD_TREE_STUBS)\spot_hardware_serial_native</MMP_STUB_GenerateSkeletonFile>
<MMP_STUB_GenerateSkeletonProject>SPOT_Hardware_serial</MMP_STUB_GenerateSkeletonProject>
</PropertyGroup>
<ItemGroup>
<MMP_STUB_Load Include="$(BUILD_TREE_PE)\Microsoft.SPOT.Hardware.SerialPort.pe">
<InProject>false</InProject>
</MMP_STUB_Load>
</ItemGroup>
<ItemGroup>
<Compile Include="SerialPort.cs" />
</ItemGroup>
<ItemGroup>
<Reference Include="Microsoft.SPOT.Native">
<HintPath>$(BUILD_TREE_DLL)\Microsoft.SPOT.Native.dll</HintPath>
</Reference>
<Reference Include="Microsoft.SPOT.Hardware">
<HintPath>$(BUILD_TREE_DLL)\Microsoft.SPOT.Hardware.dll</HintPath>
</Reference>
</ItemGroup>
</Project>
|
{
"pile_set_name": "Github"
}
|
#ifndef BOOST_THREAD_CONCURRENT_DEQUE_ADAPTOR_HPP
#define BOOST_THREAD_CONCURRENT_DEQUE_ADAPTOR_HPP
//////////////////////////////////////////////////////////////////////////////
//
// (C) Copyright Vicente J. Botet Escriba 2014. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/thread for documentation.
//
//////////////////////////////////////////////////////////////////////////////
#include <boost/thread/detail/config.hpp>
#include <boost/thread/detail/move.hpp>
#include <boost/thread/concurrent_queues/queue_op_status.hpp>
#include <boost/thread/concurrent_queues/deque_base.hpp>
#include <boost/config/abi_prefix.hpp>
namespace boost
{
namespace concurrent
{
namespace detail
{
template <typename Queue>
class deque_adaptor_copyable_only :
public boost::deque_base<typename Queue::value_type, typename Queue::size_type>
{
Queue queue;
public:
typedef typename Queue::value_type value_type;
typedef typename Queue::size_type size_type;
// Constructors/Assignment/Destructors
deque_adaptor_copyable_only() {}
// Observers
bool empty() const { return queue.empty(); }
bool full() const { return queue.full(); }
size_type size() const { return queue.size(); }
bool closed() const { return queue.closed(); }
// Modifiers
void close() { queue.close(); }
void push_back(const value_type& x) { queue.push_back(x); }
void pull_front(value_type& x) { queue.pull_front(x); };
value_type pull_front() { return queue.pull_front(); }
queue_op_status try_push_back(const value_type& x) { return queue.try_push_back(x); }
queue_op_status try_pull_front(value_type& x) { return queue.try_pull_front(x); }
queue_op_status nonblocking_push_back(const value_type& x) { return queue.nonblocking_push_back(x); }
queue_op_status nonblocking_pull_front(value_type& x) { return queue.nonblocking_pull_front(x); }
queue_op_status wait_push_back(const value_type& x) { return queue.wait_push_back(x); }
queue_op_status wait_pull_front(value_type& x) { return queue.wait_pull_front(x); }
};
template <typename Queue>
class deque_adaptor_movable_only :
public boost::deque_base<typename Queue::value_type, typename Queue::size_type>
{
Queue queue;
public:
typedef typename Queue::value_type value_type;
typedef typename Queue::size_type size_type;
// Constructors/Assignment/Destructors
deque_adaptor_movable_only() {}
// Observers
bool empty() const { return queue.empty(); }
bool full() const { return queue.full(); }
size_type size() const { return queue.size(); }
bool closed() const { return queue.closed(); }
// Modifiers
void close() { queue.close(); }
void pull_front(value_type& x) { queue.pull_front(x); };
// enable_if is_nothrow_copy_movable<value_type>
value_type pull_front() { return queue.pull_front(); }
queue_op_status try_pull_front(value_type& x) { return queue.try_pull_front(x); }
queue_op_status nonblocking_pull_front(value_type& x) { return queue.nonblocking_pull_front(x); }
queue_op_status wait_pull_front(value_type& x) { return queue.wait_pull_front(x); }
void push_back(BOOST_THREAD_RV_REF(value_type) x) { queue.push_back(boost::move(x)); }
queue_op_status try_push_back(BOOST_THREAD_RV_REF(value_type) x) { return queue.try_push_back(boost::move(x)); }
queue_op_status nonblocking_push_back(BOOST_THREAD_RV_REF(value_type) x) { return queue.nonblocking_push_back(boost::move(x)); }
queue_op_status wait_push_back(BOOST_THREAD_RV_REF(value_type) x) { return queue.wait_push_back(boost::move(x)); }
};
template <typename Queue>
class deque_adaptor_copyable_and_movable :
public boost::deque_base<typename Queue::value_type, typename Queue::size_type>
{
Queue queue;
public:
typedef typename Queue::value_type value_type;
typedef typename Queue::size_type size_type;
// Constructors/Assignment/Destructors
deque_adaptor_copyable_and_movable() {}
// Observers
bool empty() const { return queue.empty(); }
bool full() const { return queue.full(); }
size_type size() const { return queue.size(); }
bool closed() const { return queue.closed(); }
// Modifiers
void close() { queue.close(); }
void push_back(const value_type& x) { queue.push_back(x); }
void pull_front(value_type& x) { queue.pull_front(x); };
// enable_if is_nothrow_copy_movable<value_type>
value_type pull_front() { return queue.pull_front(); }
queue_op_status try_push_back(const value_type& x) { return queue.try_push_back(x); }
queue_op_status try_pull_front(value_type& x) { return queue.try_pull_front(x); }
queue_op_status nonblocking_push_back(const value_type& x) { return queue.nonblocking_push_back(x); }
queue_op_status nonblocking_pull_front(value_type& x) { return queue.nonblocking_pull_front(x); }
queue_op_status wait_push_back(const value_type& x) { return queue.wait_push_back(x); }
queue_op_status wait_pull_front(value_type& x) { return queue.wait_pull_front(x); }
void push_back(BOOST_THREAD_RV_REF(value_type) x) { queue.push_back(boost::move(x)); }
queue_op_status try_push_back(BOOST_THREAD_RV_REF(value_type) x) { return queue.try_push_back(boost::move(x)); }
queue_op_status nonblocking_push_back(BOOST_THREAD_RV_REF(value_type) x) { return queue.nonblocking_push_back(boost::move(x)); }
queue_op_status wait_push_back(BOOST_THREAD_RV_REF(value_type) x) { return queue.wait_push_back(boost::move(x)); }
};
template <class Q, class T,
#if ! defined BOOST_NO_CXX11_RVALUE_REFERENCES
#if defined __GNUC__ && ! defined __clang__
#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 5) || !defined(__GXX_EXPERIMENTAL_CXX0X__)
bool Copyable = is_copy_constructible<T>::value,
bool Movable = true
#else
bool Copyable = std::is_copy_constructible<T>::value && std::is_copy_assignable<T>::value,
bool Movable = std::is_move_constructible<T>::value && std::is_move_assignable<T>::value
#endif // __GNUC__
#elif defined _MSC_VER
#if _MSC_VER < 1700
bool Copyable = is_copy_constructible<T>::value,
bool Movable = true
#else
bool Copyable = std::is_copy_constructible<T>::value && std::is_copy_assignable<T>::value,
bool Movable = std::is_move_constructible<T>::value && std::is_move_assignable<T>::value
#endif // _MSC_VER
#else
bool Copyable = std::is_copy_constructible<T>::value && std::is_copy_assignable<T>::value,
bool Movable = std::is_move_constructible<T>::value && std::is_move_assignable<T>::value
#endif
#else
bool Copyable = is_copy_constructible<T>::value,
bool Movable = has_move_emulation_enabled<T>::value
#endif
>
struct deque_adaptor;
template <class Q, class T>
struct deque_adaptor<Q, T, true, true> {
typedef deque_adaptor_copyable_and_movable<Q> type;
};
template <class Q, class T>
struct deque_adaptor<Q, T, true, false> {
typedef deque_adaptor_copyable_only<Q> type;
};
template <class Q, class T>
struct deque_adaptor<Q, T, false, true> {
typedef deque_adaptor_movable_only<Q> type;
};
}
template <typename Queue>
class deque_adaptor :
public detail::deque_adaptor<Queue, typename Queue::value_type>::type
{
public:
typedef typename Queue::value_type value_type;
typedef typename Queue::size_type size_type;
// Constructors/Assignment/Destructors
virtual ~deque_adaptor() {};
};
}
using concurrent::deque_adaptor;
}
#include <boost/config/abi_suffix.hpp>
#endif
|
{
"pile_set_name": "Github"
}
|
#
# Copyright (C) 2015 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
class TempCache
# tl;dr wrap code around an `enable` block
# and then cache pieces that would otherwise get called over and over again
def self.enable
if @enabled
yield
else
begin
clear
@enabled = true
yield
ensure
@enabled = false
clear
end
end
end
def self.clear
@cache = {}
end
def self.create_key(*args)
args.map{|arg| arg.is_a?(ActiveRecord::Base) ? arg.global_asset_string : arg.to_s }.join("/")
end
def self.cache(*args)
if @enabled
key = create_key(*args)
if @cache.has_key?(key)
@cache[key]
else
@cache[key] = yield
end
else
yield
end
end
end
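# Usage sketch (hypothetical, not part of Canvas): wrap repeated work in an
# `enable` block and memoize the expensive piece with `cache`; outside the
# block, `cache` simply yields every time. Assumes a Canvas-like environment
# where ActiveRecord is loaded, since `create_key` references ActiveRecord::Base.
#
#   TempCache.enable do
#     3.times { TempCache.cache("answer", 42) { expensive_lookup } }
#   end
#   # `expensive_lookup` runs once; the other two calls return the cached value.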
|
{
"pile_set_name": "Github"
}
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/supervised_user/legacy/custodian_profile_downloader_service_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/signin/profile_oauth2_token_service_factory.h"
#include "chrome/browser/signin/signin_manager_factory.h"
#include "chrome/browser/supervised_user/legacy/custodian_profile_downloader_service.h"
#include "components/keyed_service/content/browser_context_dependency_manager.h"
// static
CustodianProfileDownloaderService*
CustodianProfileDownloaderServiceFactory::GetForProfile(
Profile* profile) {
return static_cast<CustodianProfileDownloaderService*>(
GetInstance()->GetServiceForBrowserContext(profile, true));
}
// static
CustodianProfileDownloaderServiceFactory*
CustodianProfileDownloaderServiceFactory::GetInstance() {
return base::Singleton<CustodianProfileDownloaderServiceFactory>::get();
}
CustodianProfileDownloaderServiceFactory::
CustodianProfileDownloaderServiceFactory()
: BrowserContextKeyedServiceFactory(
"CustodianProfileDownloaderService",
BrowserContextDependencyManager::GetInstance()) {
// Indirect dependency via ProfileDownloader.
DependsOn(ProfileOAuth2TokenServiceFactory::GetInstance());
DependsOn(SigninManagerFactory::GetInstance());
}
CustodianProfileDownloaderServiceFactory::
~CustodianProfileDownloaderServiceFactory() {}
KeyedService* CustodianProfileDownloaderServiceFactory::BuildServiceInstanceFor(
content::BrowserContext* profile) const {
return new CustodianProfileDownloaderService(static_cast<Profile*>(profile));
}
|
{
"pile_set_name": "Github"
}
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
internal static partial class Shell32
{
internal const int COR_E_PLATFORMNOTSUPPORTED = unchecked((int)0x80131539);
// https://msdn.microsoft.com/en-us/library/windows/desktop/bb762188.aspx
[DllImport(Libraries.Shell32, CharSet = CharSet.Unicode, SetLastError = false, BestFitMapping = false, ExactSpelling = true)]
internal static extern int SHGetKnownFolderPath(
[MarshalAs(UnmanagedType.LPStruct)] Guid rfid,
uint dwFlags,
IntPtr hToken,
out string ppszPath);
// https://msdn.microsoft.com/en-us/library/windows/desktop/dd378457.aspx
internal static class KnownFolders
{
/// <summary>
/// (CSIDL_ADMINTOOLS) Per user Administrative Tools
/// "%APPDATA%\Microsoft\Windows\Start Menu\Programs\Administrative Tools"
/// </summary>
internal const string AdminTools = "{724EF170-A42D-4FEF-9F26-B60E846FBA4F}";
/// <summary>
/// (CSIDL_CDBURN_AREA) Temporary Burn folder
/// "%LOCALAPPDATA%\Microsoft\Windows\Burn\Burn"
/// </summary>
internal const string CDBurning = "{9E52AB10-F80D-49DF-ACB8-4330F5687855}";
/// <summary>
/// (CSIDL_COMMON_ADMINTOOLS) Common Administrative Tools
/// "%ALLUSERSPROFILE%\Microsoft\Windows\Start Menu\Programs\Administrative Tools"
/// </summary>
internal const string CommonAdminTools = "{D0384E7D-BAC3-4797-8F14-CBA229B392B5}";
/// <summary>
/// (CSIDL_COMMON_OEM_LINKS) OEM Links folder
/// "%ALLUSERSPROFILE%\OEM Links"
/// </summary>
internal const string CommonOEMLinks = "{C1BAE2D0-10DF-4334-BEDD-7AA20B227A9D}";
/// <summary>
/// (CSIDL_COMMON_PROGRAMS) Common Programs folder
/// "%ALLUSERSPROFILE%\Microsoft\Windows\Start Menu\Programs"
/// </summary>
internal const string CommonPrograms = "{0139D44E-6AFE-49F2-8690-3DAFCAE6FFB8}";
/// <summary>
/// (CSIDL_COMMON_STARTMENU) Common Start Menu folder
/// "%ALLUSERSPROFILE%\Microsoft\Windows\Start Menu"
/// </summary>
internal const string CommonStartMenu = "{A4115719-D62E-491D-AA7C-E74B8BE3B067}";
/// <summary>
/// (CSIDL_COMMON_STARTUP, CSIDL_COMMON_ALTSTARTUP) Common Startup folder
/// "%ALLUSERSPROFILE%\Microsoft\Windows\Start Menu\Programs\StartUp"
/// </summary>
internal const string CommonStartup = "{82A5EA35-D9CD-47C5-9629-E15D2F714E6E}";
/// <summary>
/// (CSIDL_COMMON_TEMPLATES) Common Templates folder
/// "%ALLUSERSPROFILE%\Microsoft\Windows\Templates"
/// </summary>
internal const string CommonTemplates = "{B94237E7-57AC-4347-9151-B08C6C32D1F7}";
/// <summary>
/// (CSIDL_DRIVES) Computer virtual folder
/// </summary>
internal const string ComputerFolder = "{0AC0837C-BBF8-452A-850D-79D08E667CA7}";
/// <summary>
/// (CSIDL_CONNECTIONS) Network Connections virtual folder
/// </summary>
internal const string ConnectionsFolder = "{6F0CD92B-2E97-45D1-88FF-B0D186B8DEDD}";
/// <summary>
/// (CSIDL_CONTROLS) Control Panel virtual folder
/// </summary>
internal const string ControlPanelFolder = "{82A74AEB-AEB4-465C-A014-D097EE346D63}";
/// <summary>
/// (CSIDL_COOKIES) Cookies folder
/// "%APPDATA%\Microsoft\Windows\Cookies"
/// </summary>
internal const string Cookies = "{2B0F765D-C0E9-4171-908E-08A611B84FF6}";
/// <summary>
/// (CSIDL_DESKTOP, CSIDL_DESKTOPDIRECTORY) Desktop folder
/// "%USERPROFILE%\Desktop"
/// </summary>
internal const string Desktop = "{B4BFCC3A-DB2C-424C-B029-7FE99A87C641}";
/// <summary>
/// (CSIDL_MYDOCUMENTS, CSIDL_PERSONAL) Documents (My Documents) folder
/// "%USERPROFILE%\Documents"
/// </summary>
internal const string Documents = "{FDD39AD0-238F-46AF-ADB4-6C85480369C7}";
/// <summary>
/// (CSIDL_FAVORITES, CSIDL_COMMON_FAVORITES) Favorites folder
/// "%USERPROFILE%\Favorites"
/// </summary>
internal const string Favorites = "{1777F761-68AD-4D8A-87BD-30B759FA33DD}";
/// <summary>
/// (CSIDL_FONTS) Fonts folder
/// "%windir%\Fonts"
/// </summary>
internal const string Fonts = "{FD228CB7-AE11-4AE3-864C-16F3910AB8FE}";
/// <summary>
/// (CSIDL_HISTORY) History folder
/// "%LOCALAPPDATA%\Microsoft\Windows\History"
/// </summary>
internal const string History = "{D9DC8A3B-B784-432E-A781-5A1130A75963}";
/// <summary>
/// (CSIDL_INTERNET_CACHE) Temporary Internet Files folder
/// "%LOCALAPPDATA%\Microsoft\Windows\Temporary Internet Files"
/// </summary>
internal const string InternetCache = "{352481E8-33BE-4251-BA85-6007CAEDCF9D}";
/// <summary>
/// (CSIDL_INTERNET) The Internet virtual folder
/// </summary>
internal const string InternetFolder = "{4D9F7874-4E0C-4904-967B-40B0D20C3E4B}";
/// <summary>
/// (CSIDL_LOCAL_APPDATA) Local folder
/// "%LOCALAPPDATA%" ("%USERPROFILE%\AppData\Local")
/// </summary>
internal const string LocalAppData = "{F1B32785-6FBA-4FCF-9D55-7B8E7F157091}";
/// <summary>
/// (CSIDL_RESOURCES_LOCALIZED) Fixed localized resources folder
/// "%windir%\resources\0409" (per active codepage)
/// </summary>
internal const string LocalizedResourcesDir = "{2A00375E-224C-49DE-B8D1-440DF7EF3DDC}";
/// <summary>
/// (CSIDL_MYMUSIC) Music folder
/// "%USERPROFILE%\Music"
/// </summary>
internal const string Music = "{4BD8D571-6D19-48D3-BE97-422220080E43}";
/// <summary>
/// (CSIDL_NETHOOD) Network shortcuts folder "%APPDATA%\Microsoft\Windows\Network Shortcuts"
/// </summary>
internal const string NetHood = "{C5ABBF53-E17F-4121-8900-86626FC2C973}";
/// <summary>
/// (CSIDL_NETWORK, CSIDL_COMPUTERSNEARME) Network virtual folder
/// </summary>
internal const string NetworkFolder = "{D20BEEC4-5CA8-4905-AE3B-BF251EA09B53}";
/// <summary>
/// (CSIDL_MYPICTURES) Pictures folder "%USERPROFILE%\Pictures"
/// </summary>
internal const string Pictures = "{33E28130-4E1E-4676-835A-98395C3BC3BB}";
/// <summary>
/// (CSIDL_PRINTERS) Printers virtual folder
/// </summary>
internal const string PrintersFolder = "{76FC4E2D-D6AD-4519-A663-37BD56068185}";
/// <summary>
/// (CSIDL_PRINTHOOD) Printer Shortcuts folder
/// "%APPDATA%\Microsoft\Windows\Printer Shortcuts"
/// </summary>
internal const string PrintHood = "{9274BD8D-CFD1-41C3-B35E-B13F55A758F4}";
/// <summary>
/// (CSIDL_PROFILE) The root users profile folder "%USERPROFILE%"
/// ("%SystemDrive%\Users\%USERNAME%")
/// </summary>
internal const string Profile = "{5E6C858F-0E22-4760-9AFE-EA3317B67173}";
/// <summary>
/// (CSIDL_COMMON_APPDATA) ProgramData folder
/// "%ALLUSERSPROFILE%" ("%ProgramData%", "%SystemDrive%\ProgramData")
/// </summary>
internal const string ProgramData = "{62AB5D82-FDC1-4DC3-A9DD-070D1D495D97}";
/// <summary>
/// (CSIDL_PROGRAM_FILES) Program Files folder for the current process architecture
/// "%ProgramFiles%" ("%SystemDrive%\Program Files")
/// </summary>
internal const string ProgramFiles = "{905e63b6-c1bf-494e-b29c-65b732d3d21a}";
/// <summary>
/// (CSIDL_PROGRAM_FILESX86) 32 bit Program Files folder (available to both 32/64 bit processes)
/// </summary>
internal const string ProgramFilesX86 = "{7C5A40EF-A0FB-4BFC-874A-C0F2E0B9FA8E}";
/// <summary>
/// (CSIDL_PROGRAM_FILES_COMMON) Common Program Files folder for the current process architecture
/// "%ProgramFiles%\Common Files"
/// </summary>
internal const string ProgramFilesCommon = "{F7F1ED05-9F6D-47A2-AAAE-29D317C6F066}";
/// <summary>
/// (CSIDL_PROGRAM_FILES_COMMONX86) Common 32 bit Program Files folder (available to both 32/64 bit processes)
/// </summary>
internal const string ProgramFilesCommonX86 = "{DE974D24-D9C6-4D3E-BF91-F4455120B917}";
/// <summary>
/// (CSIDL_PROGRAMS) Start menu Programs folder
/// "%APPDATA%\Microsoft\Windows\Start Menu\Programs"
/// </summary>
internal const string Programs = "{A77F5D77-2E2B-44C3-A6A2-ABA601054A51}";
/// <summary>
/// (CSIDL_COMMON_DESKTOPDIRECTORY) Public Desktop folder
/// "%PUBLIC%\Desktop"
/// </summary>
internal const string PublicDesktop = "{C4AA340D-F20F-4863-AFEF-F87EF2E6BA25}";
/// <summary>
/// (CSIDL_COMMON_DOCUMENTS) Public Documents folder
/// "%PUBLIC%\Documents"
/// </summary>
internal const string PublicDocuments = "{ED4824AF-DCE4-45A8-81E2-FC7965083634}";
/// <summary>
/// (CSIDL_COMMON_MUSIC) Public Music folder
/// "%PUBLIC%\Music"
/// </summary>
internal const string PublicMusic = "{3214FAB5-9757-4298-BB61-92A9DEAA44FF}";
/// <summary>
/// (CSIDL_COMMON_PICTURES) Public Pictures folder
/// "%PUBLIC%\Pictures"
/// </summary>
internal const string PublicPictures = "{B6EBFB86-6907-413C-9AF7-4FC2ABF07CC5}";
/// <summary>
/// (CSIDL_COMMON_VIDEO) Public Videos folder
/// "%PUBLIC%\Videos"
/// </summary>
internal const string PublicVideos = "{2400183A-6185-49FB-A2D8-4A392A602BA3}";
/// <summary>
/// (CSIDL_RECENT) Recent Items folder
/// "%APPDATA%\Microsoft\Windows\Recent"
/// </summary>
internal const string Recent = "{AE50C081-EBD2-438A-8655-8A092E34987A}";
/// <summary>
/// (CSIDL_BITBUCKET) Recycle Bin virtual folder
/// </summary>
internal const string RecycleBinFolder = "{B7534046-3ECB-4C18-BE4E-64CD4CB7D6AC}";
/// <summary>
/// (CSIDL_RESOURCES) Resources fixed folder
/// "%windir%\Resources"
/// </summary>
internal const string ResourceDir = "{8AD10C31-2ADB-4296-A8F7-E4701232C972}";
/// <summary>
/// (CSIDL_APPDATA) Roaming user application data folder
/// "%APPDATA%" ("%USERPROFILE%\AppData\Roaming")
/// </summary>
internal const string RoamingAppData = "{3EB685DB-65F9-4CF6-A03A-E3EF65729F3D}";
/// <summary>
/// (CSIDL_SENDTO) SendTo folder
/// "%APPDATA%\Microsoft\Windows\SendTo"
/// </summary>
internal const string SendTo = "{8983036C-27C0-404B-8F08-102D10DCFD74}";
/// <summary>
/// (CSIDL_STARTMENU) Start Menu folder
/// "%APPDATA%\Microsoft\Windows\Start Menu"
/// </summary>
internal const string StartMenu = "{625B53C3-AB48-4EC1-BA1F-A1EF4146FC19}";
/// <summary>
/// (CSIDL_STARTUP, CSIDL_ALTSTARTUP) Startup folder
/// "%APPDATA%\Microsoft\Windows\Start Menu\Programs\StartUp"
/// </summary>
internal const string Startup = "{B97D20BB-F46A-4C97-BA10-5E3608430854}";
/// <summary>
/// (CSIDL_SYSTEM) System32 folder
/// "%windir%\system32"
/// </summary>
internal const string System = "{1AC14E77-02E7-4E5D-B744-2EB1AE5198B7}";
/// <summary>
/// (CSIDL_SYSTEMX86) X86 System32 folder
/// "%windir%\system32" or "%windir%\syswow64"
/// </summary>
internal const string SystemX86 = "{D65231B0-B2F1-4857-A4CE-A8E7C6EA7D27}";
/// <summary>
/// (CSIDL_TEMPLATES) Templates folder
/// "%APPDATA%\Microsoft\Windows\Templates"
/// </summary>
internal const string Templates = "{A63293E8-664E-48DB-A079-DF759E0509F7}";
/// <summary>
/// (CSIDL_MYVIDEO) Videos folder
/// "%USERPROFILE%\Videos"
/// </summary>
internal const string Videos = "{18989B1D-99B5-455B-841C-AB7C74E4DDFC}";
/// <summary>
/// (CSIDL_WINDOWS) Windows folder "%windir%"
/// </summary>
internal const string Windows = "{F38BF404-1D43-42F2-9305-67DE0B28FC23}";
}
}
}
|
{
"pile_set_name": "Github"
}
|
fileFormatVersion: 2
guid: 6afb7eccb00aa874c96d22978afaa858
timeCreated: 1487761034
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
|
{
"pile_set_name": "Github"
}
|
/*
Copyright (c) 2004-2011, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
if(!dojo._hasResource["dojox.charting.themes.WatersEdge"]){
dojo._hasResource["dojox.charting.themes.WatersEdge"]=true;
dojo.provide("dojox.charting.themes.WatersEdge");
dojo.require("dojox.charting.Theme");
(function(){
var _1=dojox.charting;
_1.themes.WatersEdge=new _1.Theme({colors:["#437cc0","#6256a5","#4552a3","#43c4f2","#4b66b0"]});
})();
}
|
{
"pile_set_name": "Github"
}
|
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2013 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "exponential.H"
#include "addToRunTimeSelectionTable.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
namespace Foam
{
namespace ParticleStressModels
{
defineTypeNameAndDebug(exponential, 0);
addToRunTimeSelectionTable
(
ParticleStressModel,
exponential,
dictionary
);
}
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::ParticleStressModels::exponential::exponential
(
const dictionary& dict
)
:
ParticleStressModel(dict),
preExp_(readScalar(dict.lookup("preExp"))),
expMax_(readScalar(dict.lookup("expMax"))),
g0_(readScalar(dict.lookup("g0")))
{}
Foam::ParticleStressModels::exponential::exponential
(
const exponential& hc
)
:
ParticleStressModel(hc),
preExp_(hc.preExp_),
expMax_(hc.expMax_),
g0_(hc.g0_)
{}
// * * * * * * * * * * * * * * * * Destructor * * * * * * * * * * * * * * * //
Foam::ParticleStressModels::exponential::~exponential()
{}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
Foam::tmp<Foam::Field<Foam::scalar> >
Foam::ParticleStressModels::exponential::tau
(
const Field<scalar>& alpha,
const Field<scalar>& rho,
const Field<scalar>& uSqr
) const
{
return dTaudTheta(alpha, rho, uSqr)/preExp_;
}
Foam::tmp<Foam::Field<Foam::scalar> >
Foam::ParticleStressModels::exponential::dTaudTheta
(
const Field<scalar>& alpha,
const Field<scalar>& rho,
const Field<scalar>& uSqr
) const
{
return
g0_
*min
(
exp(preExp_*(alpha - alphaPacked_)),
expMax_
);
}
// ************************************************************************* //
|
{
"pile_set_name": "Github"
}
|
$mol_theme_auto $mol_plugin
attr * mol_theme <= theme \
|
{
"pile_set_name": "Github"
}
|
'''tzinfo timezone information for Africa/Libreville.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Libreville(DstTzInfo):
'''Africa/Libreville timezone definition. See datetime.tzinfo for details'''
zone = 'Africa/Libreville'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1911,12,31,23,22,12),
]
_transition_info = [
i(2280,0,'LMT'),
i(3600,0,'WAT'),
]
Libreville = Libreville()
|
{
"pile_set_name": "Github"
}
|
@startuml
sprite $info_circle [48x48/16] {
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
0000000000000000000379ABBA8520000000000000000000
00000000000000004AFFFFFFFFFFFD810000000000000000
000000000000004CFFFFFFFFFFFFFFFF9100000000000000
00000000000009FFFFFFFFFFFFFFFFFFFE50000000000000
000000000002DFFFFFFFD222226FFFFFFFF9000000000000
00000000003EFFFFFFFFB000001FFFFFFFFFB00000000000
0000000002EFFFFFFFFFB000001FFFFFFFFFFA0000000000
000000000DFFFFFFFFFFB000001FFFFFFFFFFF7000000000
000000009FFFFFFFFFFFB000002FFFFFFFFFFFF300000000
00000003FFFFFFFFFFFFF87777BFFFFFFFFFFFFD00000000
0000000CFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF50000000
0000003FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFD0000000
0000009FFFFFFFFFFFA9999999CFFFFFFFFFFFFFF3000000
000000FFFFFFFFFFFA000000002FFFFFFFFFFFFFF8000000
000003FFFFFFFFFFF9000000001FFFFFFFFFFFFFFC000000
000006FFFFFFFFFFF9000000001FFFFFFFFFFFFFFF000000
000009FFFFFFFFFFF9000000001FFFFFFFFFFFFFFF200000
00000AFFFFFFFFFFFC000000001FFFFFFFFFFFFFFF300000
00000BFFFFFFFFFFFFFFB000001FFFFFFFFFFFFFFF400000
00000AFFFFFFFFFFFFFFB000001FFFFFFFFFFFFFFF400000
000009FFFFFFFFFFFFFFB000001FFFFFFFFFFFFFFF300000
000008FFFFFFFFFFFFFFB000001FFFFFFFFFFFFFFF100000
000005FFFFFFFFFFFFFFB000001FFFFFFFFFFFFFFE000000
000001FFFFFFFFFFFFFFB000001FFFFFFFFFFFFFFB000000
000000CFFFFFFFFFFFFFB000001FFFFFFFFFFFFFF6000000
0000007FFFFFFFFFFFCB8000001BBDFFFFFFFFFFF1000000
0000001FFFFFFFFFFA000000000001FFFFFFFFFF90000000
00000008FFFFFFFFF9000000000000FFFFFFFFFF20000000
00000000EFFFFFFFF9000000000000FFFFFFFFF800000000
000000004FFFFFFFF9000000000000FFFFFFFFD000000000
0000000008FFFFFFFB000000000002FFFFFFFE2000000000
0000000000AFFFFFFFEDDDDDDDDDDFFFFFFFF30000000000
000000000009FFFFFFFFFFFFFFFFFFFFFFFE300000000000
0000000000006FFFFFFFFFFFFFFFFFFFFFD2000000000000
00000000000002CFFFFFFFFFFFFFFFFFF800000000000000
0000000000000004CFFFFFFFFFFFFFF91000000000000000
0000000000000000028CFFFFFFFDA5000000000000000000
000000000000000000000134321000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000
}
!define FA_INFO_CIRCLE(_alias) ENTITY(rectangle,black,info_circle,_alias,FA INFO_CIRCLE)
!define FA_INFO_CIRCLE(_alias, _label) ENTITY(rectangle,black,info_circle,_label, _alias,FA INFO_CIRCLE)
!define FA_INFO_CIRCLE(_alias, _label, _shape) ENTITY(_shape,black,info_circle,_label, _alias,FA INFO_CIRCLE)
!define FA_INFO_CIRCLE(_alias, _label, _shape, _color) ENTITY(_shape,_color,info_circle,_label, _alias,FA INFO_CIRCLE)
skinparam folderBackgroundColor<<FA INFO_CIRCLE>> White
@enduml
|
{
"pile_set_name": "Github"
}
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova.inappbrowser;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.util.Log;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Created by Oliver on 22/11/2013.
*/
public class InAppBrowserDialog extends Dialog {
Context context;
InAppBrowser inAppBrowser = null;
public InAppBrowserDialog(Context context, int theme) {
super(context, theme);
this.context = context;
}
public void setInAppBroswer(InAppBrowser browser) {
this.inAppBrowser = browser;
}
public void onBackPressed () {
if (this.inAppBrowser == null) {
this.dismiss();
} else {
// better to go through the in inAppBrowser
// because it does a clean up
this.inAppBrowser.closeDialog();
}
}
}
|
{
"pile_set_name": "Github"
}
|
APP_ABI := armeabi x86 mips
APP_PLATFORM := android-8
APP_STL := stlport_static
APP_CPPFLAGS := -fno-rtti -fno-exceptions
NDK_TOOLCHAIN_VERSION=4.4.3
#APP_OPTIM := debug
|
{
"pile_set_name": "Github"
}
|
/*
* /MathJax/jax/output/HTML-CSS/fonts/TeX/SansSerif/Regular/Main.js
*
* Copyright (c) 2012 Design Science, Inc.
*
* Part of the MathJax library.
* See http://www.mathjax.org for details.
*
* Licensed under the Apache License, Version 2.0;
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS.MathJax_SansSerif={directory:"SansSerif/Regular",family:"MathJax_SansSerif",testString:"MathJax SansSerif ^ _",Ranges:[[0,127,"BasicLatin"],[128,65535,"Other"],[768,879,"CombDiacritMarks"]]};MathJax.Callback.Queue(["initFont",MathJax.OutputJax["HTML-CSS"],"MathJax_SansSerif"],["loadComplete",MathJax.Ajax,MathJax.OutputJax["HTML-CSS"].fontDir+"/SansSerif/Regular/Main.js"]);
|
{
"pile_set_name": "Github"
}
|
// Boost.Range library
//
// Copyright Thorsten Ottosen 2003-2004. Use, modification and
// distribution is subject to the Boost Software License, Version
// 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// For more information, see http://www.boost.org/libs/range/
//
#ifndef BOOST_RANGE_RESULT_ITERATOR_HPP
#define BOOST_RANGE_RESULT_ITERATOR_HPP
#if defined(_MSC_VER)
# pragma once
#endif
#include <boost/range/iterator.hpp>
namespace boost
{
//
// This interface is deprecated, use range_iterator<T>
//
template< typename C >
struct range_result_iterator : range_iterator<C>
{ };
} // namespace boost
#endif
|
{
"pile_set_name": "Github"
}
|
'use strict';
//This file contains the ES6 extensions to the core Promises/A+ API
var Promise = require('./core.js');
module.exports = Promise;
/* Static Functions */
var TRUE = valuePromise(true);
var FALSE = valuePromise(false);
var NULL = valuePromise(null);
var UNDEFINED = valuePromise(undefined);
var ZERO = valuePromise(0);
var EMPTYSTRING = valuePromise('');
function valuePromise(value) {
var p = new Promise(Promise._61);
p._65 = 1;
p._55 = value;
return p;
}
Promise.resolve = function (value) {
if (value instanceof Promise) return value;
if (value === null) return NULL;
if (value === undefined) return UNDEFINED;
if (value === true) return TRUE;
if (value === false) return FALSE;
if (value === 0) return ZERO;
if (value === '') return EMPTYSTRING;
if (typeof value === 'object' || typeof value === 'function') {
try {
var then = value.then;
if (typeof then === 'function') {
return new Promise(then.bind(value));
}
} catch (ex) {
return new Promise(function (resolve, reject) {
reject(ex);
});
}
}
return valuePromise(value);
};
Promise.all = function (arr) {
var args = Array.prototype.slice.call(arr);
return new Promise(function (resolve, reject) {
if (args.length === 0) return resolve([]);
var remaining = args.length;
function res(i, val) {
if (val && (typeof val === 'object' || typeof val === 'function')) {
if (val instanceof Promise && val.then === Promise.prototype.then) {
while (val._65 === 3) {
val = val._55;
}
if (val._65 === 1) return res(i, val._55);
if (val._65 === 2) reject(val._55);
val.then(function (val) {
res(i, val);
}, reject);
return;
} else {
var then = val.then;
if (typeof then === 'function') {
var p = new Promise(then.bind(val));
p.then(function (val) {
res(i, val);
}, reject);
return;
}
}
}
args[i] = val;
if (--remaining === 0) {
resolve(args);
}
}
for (var i = 0; i < args.length; i++) {
res(i, args[i]);
}
});
};
Promise.reject = function (value) {
return new Promise(function (resolve, reject) {
reject(value);
});
};
Promise.race = function (values) {
return new Promise(function (resolve, reject) {
values.forEach(function(value){
Promise.resolve(value).then(resolve, reject);
});
});
};
/* Prototype Methods */
Promise.prototype['catch'] = function (onRejected) {
return this.then(null, onRejected);
};
|
{
"pile_set_name": "Github"
}
|
/*
*
* * Nextcloud Talk application
* *
* * @author Mario Danic
* * Copyright (C) 2017-2020 Mario Danic <[email protected]>
* *
* * This program is free software: you can redistribute it and/or modify
* * it under the terms of the GNU General Public License as published by
* * the Free Software Foundation, either version 3 of the License, or
 * * (at your option) any later version.
* *
* * This program is distributed in the hope that it will be useful,
* * but WITHOUT ANY WARRANTY; without even the implied warranty of
* * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* * GNU General Public License for more details.
* *
* * You should have received a copy of the GNU General Public License
* * along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.nextcloud.talk.newarch.domain.usecases
import com.nextcloud.talk.models.json.chat.ChatOverall
import com.nextcloud.talk.newarch.data.source.remote.ApiErrorHandler
import com.nextcloud.talk.newarch.domain.repository.online.NextcloudTalkRepository
import com.nextcloud.talk.newarch.domain.usecases.base.UseCase
import org.koin.core.parameter.DefinitionParameters
import retrofit2.Response
class GetChatMessagesUseCase constructor(
private val nextcloudTalkRepository: NextcloudTalkRepository,
apiErrorHandler: ApiErrorHandler?
) : UseCase<Response<ChatOverall>, Any?>(apiErrorHandler) {
override suspend fun run(params: Any?): Response<ChatOverall> {
val definitionParameters = params as DefinitionParameters
return nextcloudTalkRepository.getChatMessagesForConversation(definitionParameters[0], definitionParameters[1], definitionParameters[2], definitionParameters[3], definitionParameters[4])
}
}
|
{
"pile_set_name": "Github"
}
|
{
"about": "JSON lexer by Alexey Torgashin",
"files": "*.json;*.cuda-litelexer",
"cmt_line": "//",
"cmt_block_1": "",
"cmt_block_2": "",
"case_sens": true,
"rules": {
"cmt": {
"regex": "//.*",
"style": "Comment"
},
"keyw": {
"regex": "\\b(true|false|null)\\b",
"style": "IdKeyword"
},
"id": {
"regex": "[a-zA-Z_]\\w*",
"style": "Id"
},
"num": {
"regex": "\\d+(\\.\\d+)?([eE][-\\+]?\\d+)?",
"style": "Number"
},
"str2": {
"regex": "\".*?\"(?=\\x20*:)",
"style": "String2"
},
"str": {
"regex": "\"(\\\\\\\\|\\\\\"|.)*?\"",
"style": "String"
},
"sym": {
"regex": "[\\{\\}\\[\\]:,]",
"style": "Symbol"
}
}
}
|
{
"pile_set_name": "Github"
}
|
/***************************************************************************************/
// CRC32 code
/***************************************************************************************/
#include <stddef.h> /* for size_t used by crc32() below */
#define _CRC32_(crc, ch) (crc = (crc >> 8) ^ crc32tab[(crc ^ (ch)) & 0xff])
/* generated using the AUTODIN II polynomial
* x^32 + x^26 + x^23 + x^22 + x^16 +
* x^12 + x^11 + x^10 + x^8 + x^7 + x^5 + x^4 + x^2 + x^1 + 1
*/
static const unsigned long crc32tab[256] = {
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba,
0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3,
0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91,
0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec,
0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5,
0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940,
0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116,
0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f,
0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d,
0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a,
0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818,
0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457,
0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c,
0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb,
0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9,
0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086,
0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4,
0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad,
0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683,
0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe,
0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7,
0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252,
0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60,
0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79,
0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f,
0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04,
0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a,
0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21,
0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e,
0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45,
0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db,
0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0,
0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6,
0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf,
0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d,
};
unsigned long crc32(char *buf, size_t size)
{
unsigned long crc = (unsigned long)~0;
char *p;
size_t len, nr;
len = 0;
nr=size;
for (len += nr, p = buf; nr--; ++p)
{
_CRC32_(crc, *p);
}
return ~crc;
}
/***************************************************************************************/
// END OF CRC32 code
/***************************************************************************************/
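/***************************************************************************************/
/* Minimal usage sketch (not part of the original file). Assumes the crc32()          */
/* routine above is compiled into the same translation unit. Note: because the        */
/* routine uses `unsigned long`, it matches the standard CRC-32 check value           */
/* 0xCBF43926 for "123456789" only on platforms where `unsigned long` is 32 bits.     */
/***************************************************************************************/
#include <stdio.h>
#include <string.h>

int main(void)
{
    char msg[] = "123456789";
    printf("crc32(\"%s\") = 0x%08lx\n", msg, crc32(msg, strlen(msg)));
    return 0;
}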
|
{
"pile_set_name": "Github"
}
|
fileFormatVersion: 2
guid: fb912f796367c4cc3a2f8acbb62686d3
PluginImporter:
externalObjects: {}
serializedVersion: 2
iconMap: {}
executionOrder: {}
defineConstraints: []
isPreloaded: 0
isOverridable: 0
isExplicitlyReferenced: 0
validateReferences: 1
platformData:
- first:
Any:
second:
enabled: 1
settings: {}
- first:
Editor: Editor
second:
enabled: 0
settings:
DefaultValueInitialized: true
userData:
assetBundleName:
assetBundleVariant:
|
{
"pile_set_name": "Github"
}
|
#include <benchmark/benchmark.h>
#include <fp16.h>
#ifndef EMSCRIPTEN
#include <fp16/psimd.h>
#endif
#if (defined(__i386__) || defined(__x86_64__)) && defined(__F16C__)
#include <immintrin.h>
#endif
#ifdef FP16_COMPARATIVE_BENCHMARKS
#include <third-party/THHalf.h>
#include <third-party/npy-halffloat.h>
#include <third-party/eigen-half.h>
#include <third-party/float16-compressor.h>
#include <third-party/half.hpp>
#endif
static inline uint16_t next_xorshift16(uint16_t x) {
x ^= x >> 8;
x ^= x << 9;
x ^= x >> 5;
return x;
}
static inline uint32_t next_xorshift32(uint32_t x) {
x ^= x >> 13;
x ^= x << 17;
x ^= x >> 5;
return x;
}
#ifndef EMSCRIPTEN
PSIMD_INTRINSIC psimd_u16 next_xorshift16_psimd(psimd_u16 x) {
x ^= x >> psimd_splat_u16(8);
x ^= x << psimd_splat_u16(9);
x ^= x >> psimd_splat_u16(5);
return x;
}
#endif
/* Conversion from IEEE FP16 to IEEE FP32 */
static void fp16_ieee_to_fp32_bits(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const uint32_t fp32 = fp16_ieee_to_fp32_bits(fp16);
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(fp16_ieee_to_fp32_bits);
static void fp16_ieee_to_fp32_value(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const float fp32 = fp16_ieee_to_fp32_value(fp16);
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(fp16_ieee_to_fp32_value);
#ifndef EMSCRIPTEN
static void fp16_ieee_to_fp32_psimd(benchmark::State& state) {
psimd_u16 fp16 = (psimd_u16) { 0x7C00, 0x7C01, 0x7C02, 0x7C03 };
while (state.KeepRunning()) {
const psimd_f32 fp32 = fp16_ieee_to_fp32_psimd(fp16);
fp16 = next_xorshift16_psimd(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(fp16_ieee_to_fp32_psimd);
static void fp16_ieee_to_fp32x2_psimd(benchmark::State& state) {
psimd_u16 fp16 =
(psimd_u16) { 0x7C00, 0x7C01, 0x7C02, 0x7C03, 0x7C04, 0x7C05, 0x7C06, 0x7C07 };
while (state.KeepRunning()) {
const psimd_f32x2 fp32 = fp16_ieee_to_fp32x2_psimd(fp16);
fp16 = next_xorshift16_psimd(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(fp16_ieee_to_fp32x2_psimd);
#endif
#ifdef FP16_COMPARATIVE_BENCHMARKS
static void TH_halfbits2float(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
float fp32;
TH_halfbits2float(&fp16, &fp32);
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(TH_halfbits2float);
static void npy_halfbits_to_floatbits(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const uint32_t fp32 = npy_halfbits_to_floatbits(fp16);
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(npy_halfbits_to_floatbits);
static void Eigen_half_to_float(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const float fp32 =
Eigen::half_impl::half_to_float(
Eigen::half_impl::raw_uint16_to_half(fp16));
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(Eigen_half_to_float);
static void Float16Compressor_decompress(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const float fp32 = Float16Compressor::decompress(fp16);
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(Float16Compressor_decompress);
static void half_float_detail_half2float_table(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const float fp32 =
half_float::detail::half2float_impl(fp16,
half_float::detail::true_type());
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(half_float_detail_half2float_table);
static void half_float_detail_half2float_branch(benchmark::State& state) {
uint16_t fp16 = UINT16_C(0x7C00);
while (state.KeepRunning()) {
const float fp32 =
half_float::detail::half2float_impl(fp16,
half_float::detail::false_type());
fp16 = next_xorshift16(fp16);
benchmark::DoNotOptimize(fp32);
}
}
BENCHMARK(half_float_detail_half2float_branch);
#endif
/* Conversion from IEEE FP32 to IEEE FP16 */
static void fp16_ieee_from_fp32_value(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const uint16_t fp16 = fp16_ieee_from_fp32_value(fp32_from_bits(fp32));
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(fp16_ieee_from_fp32_value);
#if (defined(__i386__) || defined(__x86_64__)) && defined(__F16C__)
static void fp16_ieee_from_fp32_hardware(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const uint16_t fp16 = static_cast<uint16_t>(
_mm_cvtsi128_si32(_mm_cvtps_ph(_mm_set_ss(fp32), _MM_FROUND_CUR_DIRECTION)));
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(fp16_ieee_from_fp32_hardware);
#endif
#ifdef FP16_COMPARATIVE_BENCHMARKS
static void TH_float2halfbits(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
uint16_t fp16;
float fp32_value = fp32_from_bits(fp32);
TH_float2halfbits(&fp32_value, &fp16);
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(TH_float2halfbits);
static void npy_floatbits_to_halfbits(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const uint16_t fp16 = npy_floatbits_to_halfbits(fp32);
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(npy_floatbits_to_halfbits);
static void Eigen_float_to_half_rtne(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const Eigen::half_impl::__half fp16 =
Eigen::half_impl::float_to_half_rtne(
fp32_from_bits(fp32));
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(Eigen_float_to_half_rtne);
static void Float16Compressor_compress(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const uint16_t fp16 = Float16Compressor::compress(fp32_from_bits(fp32));
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(Float16Compressor_compress);
static void half_float_detail_float2half_table(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const uint16_t fp16 =
half_float::detail::float2half_impl<std::round_to_nearest>(
fp32_from_bits(fp32),
half_float::detail::true_type());
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(half_float_detail_float2half_table);
static void half_float_detail_float2half_branch(benchmark::State& state) {
uint32_t fp32 = UINT32_C(0x7F800000);
while (state.KeepRunning()) {
const uint16_t fp16 =
half_float::detail::float2half_impl<std::round_to_nearest>(
fp32_from_bits(fp32),
half_float::detail::false_type());
fp32 = next_xorshift32(fp32);
benchmark::DoNotOptimize(fp16);
}
}
BENCHMARK(half_float_detail_float2half_branch);
#endif
BENCHMARK_MAIN();
|
{
"pile_set_name": "Github"
}
|
= Alternation Syntax
Alternation syntax similar to that used in Unix shells may also be
used:
`%{%{Foo}:-bar}`
This code returns the value of `%{Foo}`, if it has a value.
Otherwise, it returns a literal string bar.
`%{%{Foo}:-%{Bar}}`
This code returns the value of `%{Foo}`, if it has a value.
Otherwise, it returns the expansion of `%{Bar}`.
These conditional expansions can be nested to almost any depth, such
as with `%{%{One}:-%{%{Two}:-%{Three}}}`.
.Examples
`%{%{Stripped-User-Name}:-%{User-Name}}` +
`%{%{Framed-IP-Address}:-<none>}`
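For instance, a hypothetical `unlang` policy (a sketch, not one of the
original examples) might use the fallback form when building a reply
attribute:
----
update reply {
    Reply-Message := "Hello, %{%{Stripped-User-Name}:-%{User-Name}}"
}
----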
// Copyright (C) 2020 Network RADIUS SAS. Licenced under CC-by-NC 4.0.
// Development of this documentation was sponsored by Network RADIUS SAS.
|
{
"pile_set_name": "Github"
}
|
;;; email.el --- Email functions
;; this provides gnus-dired-attach which allows you to mark files and
;; attach them to an email
;;; Commentary:
;;
(require 'gnus-dired)
;;; Code:
(defun email-region (start end)
"Send region as the body of an email."
(interactive "r")
(let ((content (buffer-substring start end)))
(compose-mail)
(message-goto-body)
(insert content)
(message-goto-to)))
(defun email-buffer ()
"Send region as the body of an email."
(interactive)
(let ((content (buffer-string)))
(compose-mail)
(message-goto-body)
(insert content)
(message-goto-to)))
(defvar *email-heading-point* nil
"Global variable to store point in for returning.")
(defvar *email-to-addresses* nil
"Global variable to store to address in email.")
(defun email-heading-return ()
"After returning from compose do this."
(switch-to-buffer (marker-buffer *email-heading-point*))
(goto-char (marker-position *email-heading-point*))
(setq *email-heading-point* nil)
(org-set-property "SENT-ON" (current-time-string))
;; reset this in case you added new ones
(org-set-property "TO" *email-to-addresses*)
)
(defun email-send-action ()
"Send action for `compose-mail'."
(setq *email-to-addresses* (mail-fetch-field "To")))
(defun email-heading ()
"Send the current org-mode heading as the body of an email, with headline as the subject.
use these properties
TO
CC
BCC
OTHER-HEADERS is an alist specifying additional
header fields. Elements look like (HEADER . VALUE) where both
HEADER and VALUE are strings.
Save when it was sent as a SENT-ON property. This is overwritten on
subsequent sends.
(interactive)
; store location.
(setq *email-heading-point* (set-marker (make-marker) (point)))
(save-excursion
(org-mark-subtree)
(let ((content (buffer-substring (point) (mark)))
(TO (org-entry-get (point) "TO" t))
(CC (org-entry-get (point) "CC" t))
(BCC (org-entry-get (point) "BCC" t))
(SUBJECT (nth 4 (org-heading-components)))
(OTHER-HEADERS (eval (org-entry-get (point) "OTHER-HEADERS")))
(continue nil)
(switch-function nil)
(yank-action nil)
(send-actions '((email-send-action . nil)))
(return-action '(email-heading-return)))
(compose-mail TO SUBJECT OTHER-HEADERS continue switch-function yank-action send-actions return-action)
(message-goto-body)
(insert content)
(when CC
(message-goto-cc)
(insert CC))
(when BCC
(message-goto-bcc)
(insert BCC))
(if TO
(message-goto-body)
(message-goto-to)))))
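;; Example org entry that `email-heading' can send (hypothetical, for
;; illustration only): the headline becomes the subject, the TO/CC/BCC
;; properties become the corresponding headers, and the entry text becomes
;; the message body.
;;
;;   * Status update
;;     :PROPERTIES:
;;     :TO: alice@example.com
;;     :CC: bob@example.com
;;     :END:
;;     Everything under this heading is inserted into the message body.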
(defun email-region-as-attachment (start end)
"Send the region as an attachment in an email.
Argument START start of region.
Argument END end of region."
(interactive "r")
(save-restriction
(narrow-to-region start end)
(let ((content (buffer-substring start end))
(cb (buffer-name))
)
(set-buffer (get-buffer-create "*org-email-region*"))
(org-mode)
(insert content)
;(org-ref-extract-bibtex-entries)
      (compose-mail-other-frame)
(mml-attach-buffer "*org-email-region*")
(message-goto-to))))
(defun email-bibtex-entry ()
"Email current bibtex entry and pdf if it exists."
(interactive)
(save-excursion
(bibtex-beginning-of-entry)
(let* ((key (reftex-get-bib-field "=key=" (bibtex-parse-entry t)))
(pdf (expand-file-name
(concat key ".pdf")
org-ref-pdf-directory)))
(bibtex-copy-entry-as-kill)
(compose-mail)
(message-goto-body)
(insert (pop bibtex-entry-kill-ring))
(message-goto-subject)
(insert key)
(message "%s exists %s" pdf (file-exists-p pdf))
(when (file-exists-p pdf)
(mml-attach-file pdf))
(message-goto-to))))
(provide 'email)
;;; email.el ends here
|
{
"pile_set_name": "Github"
}
|
SUBROUTINE ccsdt_lr_alpha_offdiag_8(d_a,k_a_offset,d_b,k_b_offset,
&d_c,k_c_offset)
C $Id$
C This is a Fortran77 program generated by Tensor Contraction Engine v.1.0
C Copyright (c) Battelle & Pacific Northwest National Laboratory (2002)
C i0 ( )_vba + = 1/2 * Sum ( h2 p1 ) * a ( p1 h2 )_a * i1 ( h2 p1 )_vb
IMPLICIT NONE
#include "global.fh"
#include "mafdecls.fh"
#include "sym.fh"
#include "errquit.fh"
#include "tce.fh"
INTEGER d_a
INTEGER k_a_offset
INTEGER d_b
INTEGER k_b_offset
INTEGER d_c
INTEGER k_c_offset
INTEGER nxtask
INTEGER next
INTEGER nprocs
INTEGER count
INTEGER dimc
INTEGER l_c_sort
INTEGER k_c_sort
INTEGER p1b
INTEGER h2b
INTEGER p1b_1
INTEGER h2b_1
INTEGER h2b_2
INTEGER p1b_2
INTEGER dim_common
INTEGER dima_sort
INTEGER dima
INTEGER dimb_sort
INTEGER dimb
INTEGER l_a_sort
INTEGER k_a_sort
INTEGER l_a
INTEGER k_a
INTEGER l_b_sort
INTEGER k_b_sort
INTEGER l_b
INTEGER k_b
INTEGER l_c
INTEGER k_c
EXTERNAL nxtask
nprocs = GA_NNODES()
count = 0
next = nxtask(nprocs,1)
IF (next.eq.count) THEN
IF (0 .eq. ieor(irrep_v,ieor(irrep_b,irrep_a))) THEN
dimc = 1
IF (.not.MA_PUSH_GET(mt_dbl,dimc,'noname',l_c_sort,k_c_sort)) CALL
& ERRQUIT('ccsdt_lr_alpha_offdiag_8',0,MA_ERR)
CALL DFILL(dimc,0.0d0,dbl_mb(k_c_sort),1)
DO p1b = noab+1,noab+nvab
DO h2b = 1,noab
IF (int_mb(k_spin+p1b-1) .eq. int_mb(k_spin+h2b-1)) THEN
IF (ieor(int_mb(k_sym+p1b-1),int_mb(k_sym+h2b-1)) .eq. irrep_a) TH
&EN
CALL TCE_RESTRICTED_2(p1b,h2b,p1b_1,h2b_1)
CALL TCE_RESTRICTED_2(h2b,p1b,h2b_2,p1b_2)
dim_common = int_mb(k_range+p1b-1) * int_mb(k_range+h2b-1)
dima_sort = 1
dima = dim_common * dima_sort
dimb_sort = 1
dimb = dim_common * dimb_sort
IF ((dima .gt. 0) .and. (dimb .gt. 0)) THEN
IF (.not.MA_PUSH_GET(mt_dbl,dima,'noname',l_a_sort,k_a_sort)) CALL
& ERRQUIT('ccsdt_lr_alpha_offdiag_8',1,MA_ERR)
IF (.not.MA_PUSH_GET(mt_dbl,dima,'noname',l_a,k_a)) CALL ERRQUIT('
&ccsdt_lr_alpha_offdiag_8',2,MA_ERR)
CALL GET_HASH_BLOCK(d_a,dbl_mb(k_a),dima,int_mb(k_a_offset),(h2b_1
& - 1 + noab * (p1b_1 - noab - 1)))
CALL TCE_SORT_2(dbl_mb(k_a),dbl_mb(k_a_sort),int_mb(k_range+p1b-1)
&,int_mb(k_range+h2b-1),2,1,1.0d0)
IF (.not.MA_POP_STACK(l_a)) CALL ERRQUIT('ccsdt_lr_alpha_offdiag_8
&',3,MA_ERR)
IF (.not.MA_PUSH_GET(mt_dbl,dimb,'noname',l_b_sort,k_b_sort)) CALL
& ERRQUIT('ccsdt_lr_alpha_offdiag_8',4,MA_ERR)
IF (.not.MA_PUSH_GET(mt_dbl,dimb,'noname',l_b,k_b)) CALL ERRQUIT('
&ccsdt_lr_alpha_offdiag_8',5,MA_ERR)
CALL GET_HASH_BLOCK(d_b,dbl_mb(k_b),dimb,int_mb(k_b_offset),(p1b_2
& - noab - 1 + nvab * (h2b_2 - 1)))
CALL TCE_SORT_2(dbl_mb(k_b),dbl_mb(k_b_sort),int_mb(k_range+h2b-1)
&,int_mb(k_range+p1b-1),1,2,1.0d0)
IF (.not.MA_POP_STACK(l_b)) CALL ERRQUIT('ccsdt_lr_alpha_offdiag_8
&',6,MA_ERR)
CALL DGEMM('T','N',dima_sort,dimb_sort,dim_common,1.0d0,dbl_mb(k_a
&_sort),dim_common,dbl_mb(k_b_sort),dim_common,1.0d0,dbl_mb(k_c_sor
&t),dima_sort)
IF (.not.MA_POP_STACK(l_b_sort)) CALL ERRQUIT('ccsdt_lr_alpha_offd
&iag_8',7,MA_ERR)
IF (.not.MA_POP_STACK(l_a_sort)) CALL ERRQUIT('ccsdt_lr_alpha_offd
&iag_8',8,MA_ERR)
END IF
END IF
END IF
END DO
END DO
IF (.not.MA_PUSH_GET(mt_dbl,dimc,'noname',l_c,k_c)) CALL ERRQUIT('
&ccsdt_lr_alpha_offdiag_8',9,MA_ERR)
CALL TCE_SORT_0(dbl_mb(k_c_sort),dbl_mb(k_c),1.0d0/2.0d0)
CALL ADD_HASH_BLOCK(d_c,dbl_mb(k_c),dimc,int_mb(k_c_offset),0)
IF (.not.MA_POP_STACK(l_c)) CALL ERRQUIT('ccsdt_lr_alpha_offdiag_8
&',10,MA_ERR)
IF (.not.MA_POP_STACK(l_c_sort)) CALL ERRQUIT('ccsdt_lr_alpha_offd
&iag_8',11,MA_ERR)
END IF
next = nxtask(nprocs,1)
END IF
count = count + 1
next = nxtask(-nprocs,1)
call GA_SYNC()
RETURN
END
|
{
"pile_set_name": "Github"
}
|
"Basic assertion functions"
by("Gavin King","Enrique Zamudio","Ivo Kasiuk")
shared package check;
|
{
"pile_set_name": "Github"
}
|