content
stringlengths 5
1.04M
| avg_line_length
float64 1.75
12.9k
| max_line_length
int64 2
244k
| alphanum_fraction
float64 0
0.98
| licenses
sequence | repository_name
stringlengths 7
92
| path
stringlengths 3
249
| size
int64 5
1.04M
| lang
stringclasses 2
values |
---|---|---|---|---|---|---|---|---|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Vuforia;
/// <summary>
/// Vuforia virtual-button handler: while the virtual button "btSetaCima" is pressed,
/// the scene object "placaCima" is shown; it is hidden again on release.
/// </summary>
public class btSetaCimaScript : MonoBehaviour, IVirtualButtonEventHandler
{
    // Virtual button object resolved from the scene by name.
    private GameObject btSetaCima;
    // Plate object toggled by the button press/release events.
    private GameObject placaCima;

    // Use this for initialization: resolve scene objects and register for button events.
    void Start()
    {
        btSetaCima = GameObject.Find("btSetaCima");
        placaCima = GameObject.Find("placaCima");

        // Guard against missing scene objects: fail loudly here instead of
        // throwing NullReferenceException later in SetActive/GetComponent.
        if (btSetaCima == null || placaCima == null)
        {
            Debug.LogError("btSetaCimaScript: scene objects 'btSetaCima' and/or 'placaCima' were not found.");
            enabled = false;
            return;
        }

        var virtualButton = btSetaCima.GetComponent<VirtualButtonAbstractBehaviour>();
        if (virtualButton == null)
        {
            Debug.LogError("btSetaCimaScript: 'btSetaCima' has no VirtualButtonAbstractBehaviour component.");
            enabled = false;
            return;
        }

        placaCima.SetActive(false);
        virtualButton.RegisterEventHandler(this);
    }

    // Called by Vuforia when the virtual button is pressed: show the plate.
    public void OnButtonPressed(VirtualButtonAbstractBehaviour vb)
    {
        placaCima.SetActive(true);
    }

    // Called by Vuforia when the virtual button is released: hide the plate.
    public void OnButtonReleased(VirtualButtonAbstractBehaviour vb)
    {
        placaCima.SetActive(false);
    }
}
| 24.96875 | 94 | 0.69587 | [
"Apache-2.0"
] | wellisonraul/PIBIC | JogosDasSetas/Assets/btSetaCimaScript.cs | 801 | C# |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Diagnostics.ContractsLight;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using BuildXL.Cache.ContentStore.Hashing;
using BuildXL.Engine;
using BuildXL.Engine.Cache;
using BuildXL.Engine.Cache.Artifacts;
using BuildXL.Engine.Cache.Fingerprints;
using BuildXL.Engine.Cache.Fingerprints.TwoPhase;
using BuildXL.Ipc.Common;
using BuildXL.Ipc.Interfaces;
using BuildXL.Native.IO;
using BuildXL.Pips;
using BuildXL.Pips.Operations;
using BuildXL.Processes;
using BuildXL.Scheduler;
using BuildXL.Scheduler.Fingerprints;
using BuildXL.Scheduler.Tracing;
using BuildXL.Storage;
using BuildXL.Storage.Fingerprints;
using BuildXL.Utilities;
using BuildXL.Utilities.Collections;
using BuildXL.Utilities.Configuration;
using BuildXL.Utilities.Configuration.Mutable;
using BuildXL.Utilities.Tracing;
using Test.BuildXL.Processes;
using Test.BuildXL.Scheduler.Utils;
using Test.BuildXL.TestUtilities.Xunit;
using Xunit;
using Xunit.Abstractions;
using static BuildXL.Utilities.FormattableStringEx;
using Process = BuildXL.Pips.Operations.Process;
using WriteFilePip = BuildXL.Pips.Operations.WriteFile;
using ProcessesLogEventId = BuildXL.Processes.Tracing.LogEventId;
using OperationHints = BuildXL.Cache.ContentStore.Interfaces.Sessions.OperationHints;
namespace Test.BuildXL.Scheduler
{
[Trait("Category", "PipExecutorTest")]
public sealed class PipExecutorTest : TemporaryStorageTestBase
{
// P/Invoke into libc's chmod(2); lets Unix-only tests adjust file permission bits directly.
[DllImport("libc", SetLastError = true)]
private static extern int chmod(string path, int mode);
// Platform-appropriate absolute test path constant (Unix path vs. Windows UNC path).
private static readonly string TestPath = OperatingSystemHelper.IsUnixOS
? "/tmp/TestPath/test"
: @"\\TestPath\test";
/// <summary>
/// Registers the ETW event sources of the scheduler, pips, processes, and engine-cache
/// components so their logged events are captured by the test listener (and can be
/// checked via helpers such as SetExpectedFailures/AssertErrorEventLogged).
/// </summary>
public PipExecutorTest(ITestOutputHelper output) : base(output)
{
RegisterEventSource(global::BuildXL.Scheduler.ETWLogger.Log);
RegisterEventSource(global::BuildXL.Pips.ETWLogger.Log);
RegisterEventSource(global::BuildXL.Processes.ETWLogger.Log);
RegisterEventSource(global::BuildXL.Engine.Cache.ETWLogger.Log);
}
/// <summary>A WriteFile pip succeeds and materializes the expected contents on disk.</summary>
[Fact]
public Task WriteFile()
{
    return WithExecutionEnvironment(
        async env =>
        {
            // Arrange: a writable destination artifact and the pip data to write.
            string destPath = GetFullPath("dest");
            var destAbsolute = AbsolutePath.Create(env.Context.PathTable, destPath);
            var destArtifact = FileArtifact.CreateSourceFile(destAbsolute).CreateNextWrittenVersion();
            PipData payload = PipDataBuilder.CreatePipData(
                env.Context.StringTable,
                " ",
                PipDataFragmentEscaping.CRuntimeArgumentRules,
                "Success");
            var writePip = new WriteFile(destArtifact, payload, WriteFileEncoding.Utf8, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));

            // Act + Assert: the pip succeeds and the file holds the written payload.
            await VerifyPipResult(PipResultStatus.Succeeded, env, writePip);
            XAssert.AreEqual("Success", File.ReadAllText(destPath));
        });
}
/// <summary>A WriteFile pip runs once, then is reported UpToDate on subsequent runs.</summary>
[Fact]
public Task WriteFileSkippedIfUpToDate()
{
    return WithExecutionEnvironment(
        async env =>
        {
            string destPath = GetFullPath("dest");
            var destAbsolute = AbsolutePath.Create(env.Context.PathTable, destPath);
            var destArtifact = FileArtifact.CreateSourceFile(destAbsolute).CreateNextWrittenVersion();
            PipData payload = PipDataBuilder.CreatePipData(
                env.Context.StringTable,
                " ",
                PipDataFragmentEscaping.CRuntimeArgumentRules,
                "Success");
            var writePip = new WriteFile(destArtifact, payload, WriteFileEncoding.Utf8, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));

            // First run writes; the next two runs must be skipped as up to date.
            await VerifyPipResult(PipResultStatus.Succeeded, env, writePip);
            await VerifyPipResult(PipResultStatus.UpToDate, env, writePip);
            await VerifyPipResult(PipResultStatus.UpToDate, env, writePip);
            XAssert.AreEqual("Success", File.ReadAllText(destPath));
        });
}
/// <summary>A WriteFile pip fails when the destination path exceeds path-length limits.</summary>
[Fact]
public async Task WriteFilePathTooLong()
{
    await WithExecutionEnvironment(
        async env =>
        {
            // A destination nested under a 260-character directory name is too long.
            string target = GetFullPath(Path.Combine(new string('a', 260), "file.txt"));
            var targetAbsolute = AbsolutePath.Create(env.Context.PathTable, target);
            var targetArtifact = FileArtifact.CreateSourceFile(targetAbsolute).CreateNextWrittenVersion();
            PipData payload = PipDataBuilder.CreatePipData(
                env.Context.StringTable,
                " ",
                PipDataFragmentEscaping.CRuntimeArgumentRules,
                "Success");
            var writePip = new WriteFile(targetArtifact, payload, WriteFileEncoding.Utf8, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));

            await VerifyPipResult(PipResultStatus.Failed, env, writePip);
        });

    // The failed write surfaces as one DX0006 error.
    SetExpectedFailures(1, 0, "DX0006");
}
/// <summary>A CopyFile pip copies a source file's contents to its destination.</summary>
[Fact]
public Task CopyFile()
{
    return WithExecutionEnvironment(
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            FileArtifact srcArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, srcPath));
            FileArtifact dstArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, dstPath)).CreateNextWrittenVersion();

            File.WriteAllText(srcPath, "Success");

            var copyPip = new CopyFile(srcArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));
            XAssert.IsTrue(
                srcArtifact.IsSourceFile,
                "Source artifact must be a 'source' file (write count 0) since CopyFile conditionally stores content to the cache (intermediate content should be stored already when it runs)");

            await VerifyPipResult(PipResultStatus.Succeeded, env, copyPip);
            XAssert.AreEqual("Success", File.ReadAllText(dstPath));
        });
}
/// <summary>
/// Copying a symlink source succeeds only when allowCopySymlink is enabled;
/// otherwise the pip fails with DX0008.
/// </summary>
[TheoryIfSupported(requiresSymlinkPermission: true)]
[MemberData(nameof(TruthTable.GetTable), 2, MemberType = typeof(TruthTable))]
public async Task CopySymlinkTest(bool allowCopySymlink, bool storeOutputsToCache)
{
    await WithExecutionEnvironment(
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string linkPath = GetFullPath("source.link");
            string dstPath = GetFullPath("dest");
            string linkTarget = GetFullPath("source.txt");
            FileArtifact linkArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, linkPath));
            FileArtifact dstArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, dstPath)).CreateNextWrittenVersion();

            // Create the symlink's target, then the symlink itself pointing at it.
            File.WriteAllText(linkTarget, "Success");
            XAssert.PossiblySucceeded(FileUtilities.TryCreateSymbolicLink(linkPath, linkTarget, true), "Unable to create symlink");

            var copyPip = new CopyFile(linkArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));

            await VerifyPipResult(allowCopySymlink ? PipResultStatus.Succeeded : PipResultStatus.Failed, env, copyPip);
            if (allowCopySymlink)
            {
                XAssert.AreEqual("Success", File.ReadAllText(dstPath));
            }
        },
        config: pathTable => GetConfiguration(pathTable, allowCopySymlink: allowCopySymlink, storeOutputsToCache: storeOutputsToCache));

    if (!allowCopySymlink)
    {
        SetExpectedFailures(1, 0, "DX0008");
    }
}
/// <summary>
/// A CopyFile pip whose source is an intermediate (written) file succeeds, provided the
/// source's content has already been stored in the cache (the invariant that on-disk
/// intermediate files are also in cache).
/// </summary>
[Fact]
public Task CopyFileWhenSourceIsIntermediate()
{
return WithExecutionEnvironment(
async env =>
{
// CreateNextWrittenVersion marks the source as an intermediate file (write count > 0).
string source = GetFullPath("source");
AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
FileArtifact sourceArtifact = FileArtifact.CreateSourceFile(sourceAbsolutePath).CreateNextWrittenVersion();
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
FileArtifact destinationArtifact = FileArtifact.CreateSourceFile(destinationAbsolutePath).CreateNextWrittenVersion();
// The copy-source is an intermediate file. We need to respect the invariant that on-disk intermediate files are
// also in cache.
File.WriteAllText(source, "Success");
var possiblyStored = await env.LocalDiskContentStore.TryStoreAsync(
env.Cache.ArtifactContentCache,
FileRealizationMode.Copy,
sourceAbsolutePath,
tryFlushPageCacheToFileSystem: true);
if (!possiblyStored.Succeeded)
{
XAssert.Fail("Failed to store copy-source to cache: {0}", possiblyStored.Failure.DescribeIncludingInnerFailures());
}
var pip = new CopyFile(sourceArtifact, destinationArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));
XAssert.IsTrue(sourceArtifact.IsOutputFile,
"Source artifact must be an 'intermediate' file (write count > 0) since CopyFile conditionally stores content to the cache (intermediate content should be stored already when it runs)");
// With the invariant satisfied, the copy succeeds and the destination has the source's contents.
await VerifyPipResult(PipResultStatus.Succeeded, env, pip);
string actual = File.ReadAllText(destination);
XAssert.AreEqual("Success", actual);
});
}
/// <summary>A CopyFile pip fails when the destination's directory path exceeds path-length limits.</summary>
[Fact]
public async Task CopyFileDestinationDirectoryPathTooLong()
{
    await WithExecutionEnvironment(
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            FileArtifact srcArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, srcPath));

            // Destination sits under a 260-character directory name, exceeding path limits.
            string dstPath = GetFullPath(Path.Combine(new string('a', 260), "file.txt"));
            FileArtifact dstArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, dstPath)).CreateNextWrittenVersion();

            File.WriteAllText(srcPath, "Success");

            var copyPip = new CopyFile(srcArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));
            XAssert.IsTrue(
                srcArtifact.IsSourceFile,
                "Source artifact must be a 'source' file (write count 0) since CopyFile conditionally stores content to the cache (intermediate content should be stored already when it runs)");

            await VerifyPipResult(PipResultStatus.Failed, env, copyPip);
        });

    // Expect one error and one warning: DX0737 plus the copy failure DX0008.
    SetExpectedFailures(1, 1, "DX0737", "DX0008");
}
/// <summary>A CopyFile pip is skipped (UpToDate) when the destination already matches the source.</summary>
[Fact]
public Task CopyFileSkippedIfUpToDate()
{
    const string Contents = "Matches!";
    return WithExecutionEnvironment(
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            FileArtifact srcArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, srcPath));
            FileArtifact dstArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, dstPath)).CreateNextWrittenVersion();

            // Destination already holds the same contents as the source.
            File.WriteAllText(dstPath, Contents);
            File.WriteAllText(srcPath, Contents);

            // We need the destination in the file content table to know its up to date.
            await env.FileContentTable.GetAndRecordContentHashAsync(dstPath);

            var copyPip = new CopyFile(srcArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));
            XAssert.IsTrue(
                srcArtifact.IsSourceFile,
                "Source artifact must be a 'source' file (write count 0) since CopyFile conditionally stores content to the cache (intermediate content should be stored already when it runs)");

            await VerifyPipResult(PipResultStatus.UpToDate, env, copyPip);
            XAssert.AreEqual(Contents, File.ReadAllText(dstPath));
        });
}
/// <summary>
/// After one successful copy, subsequent runs of the same CopyFile pip report UpToDate
/// (the first execution seeds the up-to-date tracking).
/// </summary>
[Fact]
public Task CopyFileSkippedIfUpToDateViaPreviousCopy()
{
    const string Contents = "Matches!";
    return WithExecutionEnvironment(
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            FileArtifact srcArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, srcPath));
            FileArtifact dstArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, dstPath)).CreateNextWrittenVersion();

            File.WriteAllText(dstPath, Contents);
            File.WriteAllText(srcPath, Contents);

            var copyPip = new CopyFile(srcArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));
            XAssert.IsTrue(
                srcArtifact.IsSourceFile,
                "Source artifact must be a 'source' file (write count 0) since CopyFile conditionally stores content to the cache (intermediate content should be stored already when it runs)");

            // First run executes; the next two are skipped as up to date.
            await VerifyPipResult(PipResultStatus.Succeeded, env, copyPip);
            await VerifyPipResult(PipResultStatus.UpToDate, env, copyPip);
            await VerifyPipResult(PipResultStatus.UpToDate, env, copyPip);
            XAssert.AreEqual(Contents, File.ReadAllText(dstPath));
        });
}
/// <summary>A CopyFile pip executes (and overwrites) when the destination's contents differ from the source.</summary>
[Fact]
public Task CopyFileProceedsIfMismatched()
{
    const string Contents = "Matches!";
    // It's important that BadContents is longer to make sure truncation occurs.
    const string BadContents = "Anti-Matches!";
    return WithExecutionEnvironment(
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            FileArtifact srcArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, srcPath));
            FileArtifact dstArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, dstPath)).CreateNextWrittenVersion();

            // Pre-populate the destination with different (longer) contents.
            File.WriteAllText(dstPath, BadContents);
            File.WriteAllText(srcPath, Contents);

            var copyPip = new CopyFile(srcArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));
            XAssert.IsTrue(
                srcArtifact.IsSourceFile,
                "Source artifact must be a 'source' file (write count 0) since CopyFile conditionally stores content to the cache (intermediate content should be stored already when it runs)");

            await VerifyPipResult(PipResultStatus.Succeeded, env, copyPip);
            XAssert.AreEqual(Contents, File.ReadAllText(dstPath));
        });
}
/// <summary>
/// With storeOutputsToCache disabled, a CopyFile pip still produces its destination and is
/// tracked as Produced on the first run and UpToDate thereafter.
/// </summary>
[Fact]
public Task CopyFileStoreNoOutputToCacheMaterializeDestination()
{
    const string Contents = nameof(CopyFileStoreNoOutputToCacheMaterializeDestination);
    return WithExecutionEnvironment(
        act: async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            FileArtifact srcArtifact = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, srcPath));
            FileArtifact dstArtifact = FileArtifact.CreateOutputFile(AbsolutePath.Create(pathTable, dstPath));

            File.WriteAllText(srcPath, Contents);

            var copyPip = new CopyFile(srcArtifact, dstArtifact, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context));

            // First run produces the output...
            await VerifyPipResult(PipResultStatus.Succeeded, env, copyPip);
            XAssert.AreEqual(PipOutputOrigin.Produced, env.State.FileContentManager.GetPipOutputOrigin(dstArtifact));

            // ...and the next two runs find it up to date even though outputs were not cached.
            await VerifyPipResult(PipResultStatus.UpToDate, env, copyPip);
            XAssert.AreEqual(PipOutputOrigin.UpToDate, env.State.FileContentManager.GetPipOutputOrigin(dstArtifact));
            await VerifyPipResult(PipResultStatus.UpToDate, env, copyPip);
            XAssert.AreEqual(PipOutputOrigin.UpToDate, env.State.FileContentManager.GetPipOutputOrigin(dstArtifact));

            XAssert.AreEqual(Contents, File.ReadAllText(dstPath));
        },
        config: pathTable => GetConfiguration(pathTable, storeOutputsToCache: false));
}
/// <summary>Without a cache, a process pip re-executes (and re-produces its output) on every run.</summary>
[Fact]
public Task ProcessWithoutCache()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";
    return WithExecutionEnvironment(
        async env =>
        {
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            AbsolutePath srcAbsolute = AbsolutePath.Create(env.Context.PathTable, srcPath);
            AbsolutePath dstAbsolute = AbsolutePath.Create(env.Context.PathTable, dstPath);

            File.WriteAllText(dstPath, BadContents);
            File.WriteAllText(srcPath, Contents);

            Process copyProcess = CreateCopyProcess(env.Context, srcAbsolute, dstAbsolute);

            // Each run succeeds and bumps the produced-output counter.
            await VerifyPipResult(PipResultStatus.Succeeded, env, copyProcess);
            XAssert.AreEqual(1, env.OutputFilesProduced, "produced count");
            await VerifyPipResult(PipResultStatus.Succeeded, env, copyProcess);
            XAssert.AreEqual(2, env.OutputFilesProduced, "produced count");

            XAssert.AreEqual(Contents, File.ReadAllText(dstPath));
        });
}
/// <summary>
/// A service pip must receive the aggregated file-access permissions of its client pips:
/// the service pip here omits its own dependencies, so it only succeeds if the client's
/// permissions are applied to it.
/// </summary>
[Fact]
public Task TestServicePipReceivesAggregatedPermissions()
{
return WithExecutionEnvironment(
async env =>
{
string source = GetFullPath("source");
AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
string destination2 = GetFullPath("dest2");
AbsolutePath destinationAbsolutePath2 = AbsolutePath.Create(env.Context.PathTable, destination2);
File.WriteAllText(source, "123");
// because omitDependencies: true, 'servicePip' would fail if it didn't receive permissions of 'clientPip'
Process servicePip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, omitDependencies: true, serviceInfo: ServiceInfo.Service(new PipId(324)));
Process clientPip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath2, serviceInfo: ServiceInfo.ServiceClient(new[] { servicePip.PipId }));
// Register the client with the service so the environment can aggregate permissions.
env.SetServicePipClients(new Dictionary<PipId, IReadOnlyCollection<Pip>>
{
[servicePip.PipId] = new[] { clientPip }
});
var testRunChecker = new TestRunChecker();
// Both pips are expected to succeed; the service pip relies on the client's permissions.
await testRunChecker.VerifySucceeded(env, servicePip);
await testRunChecker.VerifySucceeded(env, clientPip);
});
}
/// <summary>An IPC pip backed by a provider that always reports success must succeed.</summary>
[Fact]
public Task TestSucceedingIpcPip()
{
    return TestIpcPip(new DummyIpcProvider(statusToAlwaysReturn: IpcResultStatus.Success), true);
}
/// <summary>An IPC pip backed by a provider that always reports a generic error must fail.</summary>
[Fact]
public Task TestFailingIpcPip()
{
    return TestIpcPip(new DummyIpcProvider(statusToAlwaysReturn: IpcResultStatus.GenericError), false);
}
/// <summary>
/// Shared driver for IPC pip tests: runs an IPC pip (whose server echoes the operation
/// payload) several times against the given provider and checks the expected outcome.
/// </summary>
/// <param name="ipcProvider">IPC provider controlling whether operations succeed or fail.</param>
/// <param name="expectedToSucceed">Whether the IPC pip is expected to succeed.</param>
private Task TestIpcPip(IIpcProvider ipcProvider, bool expectedToSucceed)
{
var ipcOperationPayload = "hi";
return WithExecutionEnvironmentAndIpcServer(
ipcProvider,
ipcExecutor: new LambdaIpcOperationExecutor((op) => IpcResult.Success(op.Payload)), // the server echoes whatever operation payload it receives
act: async (env, moniker, ipcServer) => // ipcServer has been started; ipc pips will connect to it;
{
var workingDir = AbsolutePath.Create(env.Context.PathTable, GetFullPath("ipc-wd"));
// construct an IPC pip
var ipcInfo = new IpcClientInfo(moniker.ToStringId(env.Context.StringTable), new ClientConfig());
var ipcPip = AssignFakePipId(IpcPip.CreateFromStringPayload(env.Context, workingDir, ipcInfo, ipcOperationPayload, PipProvenance.CreateDummy(env.Context)));
var expectedOutputFile = ipcPip.OutputFile.Path.ToString(env.Context.PathTable);
// The echo server writes the payload back, so the output file content equals the payload.
var expectedOutputFileContent = ipcOperationPayload;
// execute pip several times
if (expectedToSucceed)
{
var testRunChecker = new TestRunChecker(expectedOutputFile);
// First run succeeds; a rerun is up to date; deleting the output forces re-execution.
// NOTE(review): the checker helpers use two spellings of the named argument
// ('expectMarkedPerpetuallyDirty' vs 'expectMarkedPerpertuallyDirty') — presumably
// matching differently-spelled parameters on the helper class; confirm before renaming.
await testRunChecker.VerifySucceeded(env, ipcPip, expectedOutputFileContent, expectMarkedPerpetuallyDirty: true);
await testRunChecker.VerifyUpToDate(env, ipcPip, expectedOutputFileContent, expectMarkedPerpertuallyDirty: true);
File.Delete(expectedOutputFile);
await testRunChecker.VerifySucceeded(env, ipcPip, expectedOutputFileContent, expectMarkedPerpetuallyDirty: true);
}
else
{
var testRunChecker = new TestRunChecker();
// Each failed run logs exactly one PipIpcFailed error.
await testRunChecker.VerifyFailed(env, ipcPip, expectMarkedPerpertuallyDirty: true);
AssertErrorEventLogged(LogEventId.PipIpcFailed, count: 1);
await testRunChecker.VerifyFailed(env, ipcPip, expectMarkedPerpertuallyDirty: true);
AssertErrorEventLogged(LogEventId.PipIpcFailed, count: 1);
}
},
cache: InMemoryCacheFactory.Create);
}
/// <summary>
/// With a cache: first run executes, rerun is UpToDate, and after deleting the output
/// the pip's result is deployed from the cache.
/// </summary>
[Fact]
public Task ProcessWithCache()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            AbsolutePath srcAbsolute = AbsolutePath.Create(env.Context.PathTable, srcPath);
            AbsolutePath dstAbsolute = AbsolutePath.Create(env.Context.PathTable, dstPath);

            File.WriteAllText(dstPath, BadContents);
            File.WriteAllText(srcPath, Contents);

            Process copyProcess = CreateCopyProcess(env.Context, srcAbsolute, dstAbsolute);
            var checker = new TestRunChecker(dstPath);

            await checker.VerifySucceeded(env, copyProcess, Contents);   // cache miss: executes
            await checker.VerifyUpToDate(env, copyProcess, Contents);    // rerun: up to date
            File.Delete(dstPath);
            await checker.VerifyDeployedFromCache(env, copyProcess, Contents); // replayed from cache
        });
}
/// <summary>
/// "Less safe" sandbox configuration: file-access monitoring is on, but unexpected
/// file accesses are downgraded to warnings instead of errors.
/// </summary>
private static IConfiguration GetUnsafeOptions(PathTable pathTable)
{
    return GetConfiguration(pathTable, monitorFileAccesses: true, unexpectedFileAccessesAreErrors: false);
}
/// <summary>
/// "Safer" sandbox configuration: file-access monitoring is on and unexpected
/// file accesses are treated as errors.
/// </summary>
private static IConfiguration GetSaferOptions(PathTable pathTable)
{
    return GetConfiguration(pathTable, monitorFileAccesses: true, unexpectedFileAccessesAreErrors: true);
}
/// <summary>
/// A pip executed under stricter sandbox options can be served from the cache when
/// re-executed under less strict options.
/// </summary>
[Fact]
public async Task ExecutingProcessWithLessSafeSandboxOptionsShouldGetCacheHit()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";
    var context = BuildXLContext.CreateInstanceForTesting();
    var cache = InMemoryCacheFactory.Create();
    string srcPath = GetFullPath("source");
    string dstPath = GetFullPath("dest");
    File.WriteAllText(srcPath, Contents);
    File.WriteAllText(dstPath, BadContents);

    Func<IPipExecutionEnvironment, Process> makePip = env =>
        CreateCopyProcess(
            env.Context,
            AbsolutePath.Create(env.Context.PathTable, srcPath),
            AbsolutePath.Create(env.Context.PathTable, dstPath));

    // Run under the stricter sandbox (unexpected accesses are errors) --> executes, succeeds.
    var strictEnv = CreateExecutionEnvironment(context, cache: () => cache, config: GetSaferOptions);
    await new TestRunChecker(dstPath).VerifySucceeded(strictEnv, makePip(strictEnv), Contents);

    // Rerun under the laxer sandbox (unexpected accesses only warn) --> cache hit.
    var laxEnv = CreateExecutionEnvironment(context, cache: () => cache, config: GetUnsafeOptions);
    await new TestRunChecker(dstPath).VerifyDeployedFromCache(laxEnv, makePip(laxEnv), Contents);
}
/// <summary>
/// A pip executed under laxer sandbox options is a cache miss when re-executed under
/// stricter options; once executed strictly, later strict runs are cache hits.
/// </summary>
[Fact]
public async Task ExecutingProcessWithMoreSafeSandboxOptionsShouldGetCacheMiss()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";
    var context = BuildXLContext.CreateInstanceForTesting();
    var cache = InMemoryCacheFactory.Create();
    string srcPath = GetFullPath("source");
    string dstPath = GetFullPath("dest");
    File.WriteAllText(srcPath, Contents);
    File.WriteAllText(dstPath, BadContents);

    Func<IPipExecutionEnvironment, Process> makePip = env =>
        CreateCopyProcess(
            env.Context,
            AbsolutePath.Create(env.Context.PathTable, srcPath),
            AbsolutePath.Create(env.Context.PathTable, dstPath));

    // Run under the laxer sandbox (unexpected accesses only warn) --> executes, succeeds.
    var laxEnv = CreateExecutionEnvironment(context, cache: () => cache, config: GetUnsafeOptions);
    await new TestRunChecker(dstPath).VerifySucceeded(laxEnv, makePip(laxEnv), Contents);

    // Rerun under the stricter sandbox (unexpected accesses are errors) --> cache miss, executes again.
    var strictEnv = CreateExecutionEnvironment(context, cache: () => cache, config: GetSaferOptions);
    var strictPip = makePip(strictEnv);
    var checker = new TestRunChecker(dstPath);
    await checker.VerifySucceeded(strictEnv, strictPip, Contents);

    // Same pip against the same environment --> 'UpToDate' cache hit.
    await checker.VerifyUpToDate(strictEnv, strictPip, Contents);

    // Fresh environment with the same strict options --> 'DeployedFromCache' cache hit.
    var strictEnv2 = CreateExecutionEnvironment(context, cache: () => cache, config: GetSaferOptions);
    await new TestRunChecker(dstPath).VerifyDeployedFromCache(strictEnv2, makePip(strictEnv2), Contents);
}
/// <summary>
/// A pip first executed with preserve-outputs disabled is still a cache hit when
/// re-executed with preserve-outputs enabled.
/// </summary>
[Fact]
public async Task ExecutingProcessWithPreserveOutputOnShouldGetCacheHit()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";
    var context = BuildXLContext.CreateInstanceForTesting();
    var cache = InMemoryCacheFactory.Create();
    string srcPath = GetFullPath("source");
    string dstPath = GetFullPath("dest");
    File.WriteAllText(srcPath, Contents);
    File.WriteAllText(dstPath, BadContents);

    Func<IPipExecutionEnvironment, Process> makePip = env =>
        CreateCopyProcess(
            env.Context,
            AbsolutePath.Create(env.Context.PathTable, srcPath),
            AbsolutePath.Create(env.Context.PathTable, dstPath),
            options: Process.Options.AllowPreserveOutputs);

    // Run with preserve-outputs disabled --> executes, succeeds.
    var disabledEnv = CreateExecutionEnvironment(
        context,
        cache: () => cache,
        config: pt => GetConfiguration(pt, preserveOutputs: PreserveOutputsMode.Disabled));
    await new TestRunChecker(dstPath).VerifySucceeded(disabledEnv, makePip(disabledEnv), Contents);

    // Rerun with preserve-outputs enabled --> cache hit (up to date).
    var enabledEnv = CreateExecutionEnvironment(
        context,
        cache: () => cache,
        config: pt => GetConfiguration(pt, preserveOutputs: PreserveOutputsMode.Enabled));
    await new TestRunChecker(dstPath).VerifyUpToDate(enabledEnv, makePip(enabledEnv), Contents);
}
/// <summary>
/// A pip cached with one preserve-outputs salt is a cache miss in an environment with a
/// different salt, and also a miss once preserve-outputs is disabled.
/// NOTE(review): env1 and env2 are created with identical configurations; the expected
/// miss presumably comes from each freshly created environment getting a distinct
/// preserve-outputs salt — confirm against CreateExecutionEnvironment.
/// </summary>
[Fact]
public async Task ExecutingProcessWithDifferentPreserveOutputSaltShouldGetCacheMiss()
{
const string Contents = "Matches!";
const string BadContents = "Anti-Matches!";
var context = BuildXLContext.CreateInstanceForTesting();
var cache = InMemoryCacheFactory.Create();
string source = GetFullPath("source");
string destination = GetFullPath("dest");
File.WriteAllText(source, Contents);
File.WriteAllText(destination, BadContents);
// The pip must allow preserved outputs for the salt to participate in its fingerprint.
Func<IPipExecutionEnvironment, Process> createPip = (env) =>
{
AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
return CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, options: Process.Options.AllowPreserveOutputs);
};
// run the pip with (preserveOutputs: enabled) --> expect it to succeed
var env1 = CreateExecutionEnvironment(
context,
cache: () => cache,
config: pt => GetConfiguration(pt, preserveOutputs: PreserveOutputsMode.Enabled));
var pip1 = createPip(env1);
var testChecker = new TestRunChecker(destination);
await testChecker.VerifySucceeded(env1, pip1, Contents);
// run the same pip again against the same environment --> expect 'UpToDate' cache hit
await testChecker.VerifyUpToDate(env1, pip1, Contents);
// run the pip with different preserve outputs salt (preserveOutputs: enabled) --> expect cache miss
var env2 = CreateExecutionEnvironment(
context,
cache: () => cache,
config: pt => GetConfiguration(pt, preserveOutputs: PreserveOutputsMode.Enabled));
await new TestRunChecker(destination).VerifySucceeded(env2, createPip(env2), Contents);
// run the pip with (preserveOutputs: disabled) --> expect cache miss
var env3 = CreateExecutionEnvironment(
context,
cache: () => cache,
config: pt => GetConfiguration(pt, preserveOutputs: PreserveOutputsMode.Disabled));
await new TestRunChecker(destination).VerifySucceeded(env3, createPip(env3), Contents);
}
/// <summary>
/// A process pip with a declared temporary output that is never created on disk still
/// caches, replays, and reruns normally.
/// </summary>
[Fact]
public Task TemporaryOutputsAreNotStoredInCacheIfAbsent()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            string tempOutPath = GetFullPath("tempOutput");
            AbsolutePath srcAbsolute = AbsolutePath.Create(pathTable, srcPath);
            AbsolutePath dstAbsolute = AbsolutePath.Create(pathTable, dstPath);
            AbsolutePath tempOutAbsolute = AbsolutePath.Create(pathTable, tempOutPath);

            File.WriteAllText(dstPath, BadContents);
            File.WriteAllText(srcPath, Contents);
            // Note: the temporary output is intentionally never written to disk here.

            Process copyProcess = CreateCopyProcess(env.Context, srcAbsolute, dstAbsolute, temporaryOutput: tempOutAbsolute);
            var checker = new TestRunChecker(dstPath);

            await checker.VerifySucceeded(env, copyProcess, Contents);
            await checker.VerifyUpToDate(env, copyProcess, Contents);
            File.Delete(dstPath);
            await checker.VerifyDeployedFromCache(env, copyProcess, Contents);
        });
}
/// <summary>
/// A temporary output that exists on disk is not stored in the cache: after deletion it is
/// not recovered when the pip's result is replayed from cache.
/// </summary>
[Fact]
public Task TemporaryOutputsAreNotStoredInCacheIfPresent()
{
    const string Contents = "Matches!";
    const string TempContents = "TempMatches!";
    const string BadContents = "Anti-Matches!";
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string srcPath = GetFullPath("source");
            string dstPath = GetFullPath("dest");
            string tempOutPath = GetFullPath("tempOutput");
            AbsolutePath srcAbsolute = AbsolutePath.Create(pathTable, srcPath);
            AbsolutePath dstAbsolute = AbsolutePath.Create(pathTable, dstPath);
            AbsolutePath tempOutAbsolute = AbsolutePath.Create(pathTable, tempOutPath);

            File.WriteAllText(dstPath, BadContents);
            File.WriteAllText(srcPath, Contents);
            File.WriteAllText(tempOutPath, TempContents);

            Process copyProcess = CreateCopyProcess(env.Context, srcAbsolute, dstAbsolute, temporaryOutput: tempOutAbsolute);
            var checker = new TestRunChecker(dstPath);

            await checker.VerifySucceeded(env, copyProcess, Contents);
            await checker.VerifyUpToDate(env, copyProcess, Contents);

            File.Delete(dstPath);
            File.Delete(tempOutPath);
            await checker.VerifyDeployedFromCache(env, copyProcess, Contents);

            // Temporary outputs are not stored in cache, so should be only one OutputFilesProduced!
            XAssert.IsFalse(File.Exists(tempOutPath), "Temporary output should not be recovered from cache");
        });
}
/// <summary>
/// Verifies that Process.Options.OutputsMustRemainWritable makes outputs materialize via
/// copy (writable), while the default allows hardlink realization (read-only), across
/// cache hits and misses. Relies on the in-memory content cache's realization-mode
/// tracking, which is reset between phases.
/// </summary>
[Fact]
public Task ProcessWithCacheAndOutputsForcedWritable()
{
const string Contents = "Matches!";
const string BadContents = "Anti-Matches!";
return WithCachingExecutionEnvironment(
GetFullPath(".cache"),
async env =>
{
// Start from a clean realization-mode tracking state.
env.InMemoryContentCache.ReinitializeRealizationModeTracking();
string source = GetFullPath("source");
AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
File.WriteAllText(destination, BadContents);
File.WriteAllText(source, Contents);
// We add an option to opt-out this single pip from hardlinking.
Process pip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, options: Process.Options.OutputsMustRemainWritable);
var testRunChecker = new TestRunChecker(destination);
await testRunChecker.VerifySucceeded(env, pip, Contents);
await testRunChecker.VerifyUpToDate(env, pip, Contents);
// The file should be writable (i.e., copy realization mode), since we opted out of hardlinking. So we can modify it, and expect the pip to re-run.
XAssert.AreEqual(FileRealizationMode.Copy, env.InMemoryContentCache.GetRealizationMode(destination));
env.InMemoryContentCache.ReinitializeRealizationModeTracking();
try
{
File.WriteAllText(destination, BadContents);
}
catch (UnauthorizedAccessException)
{
XAssert.Fail("Failed writing to the destination file. This implies that the outputs were not left writable, despite requesting that via Process.Options.OutputsMustRemainWritable");
}
// Now we re-run with read-only outputs allowed. This should be a cache hit (this option doesn't affect execution), leaving a read-only output.
Process hardlinkingPip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, options: Process.Options.None);
await testRunChecker.VerifyDeployedFromCache(env, hardlinkingPip, Contents);
// The file should not be writable (i.e., not copy realization mode) this time.
XAssert.AreNotEqual(FileRealizationMode.Copy, env.InMemoryContentCache.GetRealizationMode(destination));
env.InMemoryContentCache.ReinitializeRealizationModeTracking();
// Now re-run as a cache miss (should still be read-only; this is the production rather than deployment path).
File.WriteAllText(source, BadContents);
await testRunChecker.VerifySucceeded(env, hardlinkingPip, BadContents);
// The file should not be writable (i.e., not copy realization mode) this time.
XAssert.AreNotEqual(FileRealizationMode.Copy, env.InMemoryContentCache.GetRealizationMode(destination));
env.InMemoryContentCache.ReinitializeRealizationModeTracking();
});
}
[Fact]
public async Task ProcessWarningWithCache()
{
const string BadContents = "Anti-Matches!";
string Expected = "WARNING" + Environment.NewLine;
await WithCachingExecutionEnvironment(
GetFullPath(".cache"),
async env =>
{
string workingDirectory = GetFullPath("work");
AbsolutePath workingDirectoryAbsolutePath = AbsolutePath.Create(env.Context.PathTable, workingDirectory);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
File.WriteAllText(destination, BadContents);
Process pip = CreateWarningProcess(env.Context, workingDirectoryAbsolutePath, destinationAbsolutePath);
var testRunChecker = new TestRunChecker(destination);
testRunChecker.ExpectWarning();
await testRunChecker.VerifySucceeded(env, pip, Expected);
testRunChecker.ExpectWarningFromCache();
await testRunChecker.VerifyUpToDate(env, pip, Expected);
File.Delete(destination);
testRunChecker.ExpectWarningFromCache();
await testRunChecker.VerifyDeployedFromCache(env, pip, Expected);
},
null,
pathTable => GetConfiguration(pathTable, enableLazyOutputs: false));
SetExpectedFailures(0, 3, "DX0065");
}
[Fact]
public async Task ProcessWarningWithCacheAndWarnAsError()
{
const string BadContents = "Anti-Matches!";
string Expected = "WARNING" + Environment.NewLine;
await WithCachingExecutionEnvironment(
GetFullPath(".cache"),
async env =>
{
string workingDirectory = GetFullPath("work");
AbsolutePath workingDirectoryAbsolutePath = AbsolutePath.Create(env.Context.PathTable, workingDirectory);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
File.WriteAllText(destination, BadContents);
Process pip = CreateWarningProcess(env.Context, workingDirectoryAbsolutePath, destinationAbsolutePath);
var testRunChecker = new TestRunChecker(destination);
testRunChecker.ExpectWarning();
await testRunChecker.VerifySucceeded(env, pip, Expected, expectMarkedPerpetuallyDirty: true);
// This warning should not come from the cache since /warnaserror is enabled
testRunChecker.ExpectWarning();
await testRunChecker.VerifySucceeded(env, pip, Expected, expectMarkedPerpetuallyDirty: true);
},
null,
(pathTable) =>
{
var config2 = ConfigurationHelpers.GetDefaultForTesting(pathTable, AbsolutePath.Create(pathTable, TestPath));
config2.Logging.TreatWarningsAsErrors = true;
return config2;
});
AssertWarningEventLogged(ProcessesLogEventId.PipProcessWarning, 2);
AssertInformationalEventLogged(global::BuildXL.Scheduler.Tracing.LogEventId.ScheduleProcessNotStoredToWarningsUnderWarnAsError, 2);
}
        /// <summary>
        /// Produces a failing process (failure determined by the process return code) equipped with a custom error regex.
        /// Verifies that:
        /// 1) The process indeed fails.
        /// 2) The error log contains only the text matched by the regex, unless the reporting mode requests full output.
        /// </summary>
        [Theory]
        [InlineData(OutputReportingMode.FullOutputAlways, 0)]
        [InlineData(OutputReportingMode.FullOutputOnError, 0)]
        [InlineData(OutputReportingMode.FullOutputOnWarningOrError, 0)]
        [InlineData(OutputReportingMode.TruncatedOutputOnError, 0)]
        [InlineData(OutputReportingMode.FullOutputAlways, 2 * SandboxedProcessPipExecutor.MaxConsoleLength)]
        [InlineData(OutputReportingMode.FullOutputOnError, 2 * SandboxedProcessPipExecutor.MaxConsoleLength)]
        [InlineData(OutputReportingMode.FullOutputOnWarningOrError, 2 * SandboxedProcessPipExecutor.MaxConsoleLength)]
        [InlineData(OutputReportingMode.TruncatedOutputOnError, 2 * SandboxedProcessPipExecutor.MaxConsoleLength)]
        [InlineData(OutputReportingMode.FullOutputOnError, 2 * SandboxedProcessPipExecutor.MaxConsoleLength, false)]
        [InlineData(OutputReportingMode.TruncatedOutputOnError, 2 * SandboxedProcessPipExecutor.MaxConsoleLength, true)]
        public Task FailingProcessWithErrorRegex(OutputReportingMode outputReportingMode, int errorMessageLength, bool regexMatchesSomething = true)
        {
            const string BadContents = "Anti-Matches!";

            return WithCachingExecutionEnvironment(
                GetFullPath(".cache"),
                async env =>
                {
                    string workingDirectory = GetFullPath("work");
                    AbsolutePath workingDirectoryAbsolutePath = AbsolutePath.Create(env.Context.PathTable, workingDirectory);
                    string destination = GetFullPath("dest");
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
                    File.WriteAllText(destination, BadContents);

                    // "NOMATCH" makes the regex match nothing, so the whole output is treated as the error.
                    Process pip = CreateErrorProcess(
                        env.Context,
                        workingDirectoryAbsolutePath,
                        destinationAbsolutePath,
                        errorPattern: regexMatchesSomething ? "ERROR" : "NOMATCH",
                        errorMessageLength: errorMessageLength);
                    var testRunChecker = new TestRunChecker();
                    await testRunChecker.VerifyFailed(env, pip);
                    AssertErrorEventLogged(ProcessesLogEventId.PipProcessError);

                    string log = EventListener.GetLog();
                    XAssert.IsTrue(log.Contains("DX00" + (int)ProcessesLogEventId.PipProcessError));
                    XAssert.IsTrue(log.Contains("ERROR"), "text 'ERROR' should not be filtered out by error regex.");

                    if (outputReportingMode == OutputReportingMode.TruncatedOutputOnError)
                    {
                        XAssert.IsFalse(log.Contains("WARNING"), "text 'WARNING' should be filtered out by error regex.");
                    }
                    else
                    {
                        // The full build output is requested, so we expect the non-filtered process output to appear.
                        XAssert.IsTrue(log.Contains("WARNING"));

                        // If there was an error regex that matched something, the "WARNING" part of the error would not be in the standard
                        // PipProcessError portion. But the user still requested the full unabridged output via the OutputReportingMode
                        // setting. So we expect to see it repeated on the PipProcessOutput message.
                        if (regexMatchesSomething)
                        {
                            XAssert.IsTrue(log.Contains("DX00" + (int)ProcessesLogEventId.PipProcessOutput));
                        }
                    }

                    // Validates that errors don't get truncated when the regex doesn't match anything.
                    if (!regexMatchesSomething && outputReportingMode != OutputReportingMode.TruncatedOutputOnError)
                    {
                        // "Z" is a marker at the end of the error message. We should see this in the DX64 message as long as
                        // we aren't truncating the error.
                        XAssert.IsTrue(Regex.IsMatch(log, $@"(?<Prefix>dx00{(int)ProcessesLogEventId.PipProcessError})(?<AnythingButZ>[^z]*)(?<ZForEndOfError>z)", RegexOptions.IgnoreCase),
                            "Non-truncated error message was not found in error event. Full output:" + log);
                    }
                },
                null,
                pathTable => GetConfiguration(pathTable, enableLazyOutputs: false, outputReportingMode: outputReportingMode));
        }
[Fact]
public Task FailingProcessWithChunkSizeErrorAndErrorRegex()
{
const string BadContents = "Anti-Matches!";
return WithCachingExecutionEnvironment(
GetFullPath(".cache"),
async env =>
{
string workingDirectory = GetFullPath("work");
AbsolutePath workingDirectoryAbsolutePath = AbsolutePath.Create(env.Context.PathTable, workingDirectory);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
File.WriteAllText(destination, BadContents);
var builder = new StringBuilder();
builder.AppendLine("@echo off");
for (int i = 0; i < SandboxedProcessPipExecutor.OutputChunkInLines * 3 / 2; ++i)
{
if (i % 2 == 0)
{
builder.AppendLine("echo ERROR - " + i);
}
else
{
builder.AppendLine("echo GOOD - " + i);
}
}
Process pip = CreateErrorProcess(
env.Context,
workingDirectoryAbsolutePath,
destinationAbsolutePath,
errorPattern: "ERROR",
errorMessageLength: 0,
scriptContent: builder.ToString());
var testRunChecker = new TestRunChecker();
await testRunChecker.VerifyFailed(env, pip);
AssertErrorEventLogged(ProcessesLogEventId.PipProcessError, count: 1);
},
null,
pathTable => GetConfiguration(pathTable, enableLazyOutputs: false, outputReportingMode: OutputReportingMode.FullOutputOnError));
}
[Theory]
// For all cases except the commented one, if the regex matches every thing, the user gets all the information from the log message,
// the path to original stdout/err log file shouldn't be presented.
// Otherwise, it should be presented.
[InlineData(true, ProcessesLogEventId.PipProcessError, false)]
[InlineData(false, ProcessesLogEventId.PipProcessError, true)]
[InlineData(false, ProcessesLogEventId.PipProcessError, true, 10 * SandboxedProcessPipExecutor.OutputChunkInLines)]
// When the error length exceed limit and outputReportingMode is set to TruncatedOutputOnError,
// even though the regex matches every thing the error message still get truncated, so present the path to original stdout/err log file
[InlineData(true, ProcessesLogEventId.PipProcessError, true, 10 * SandboxedProcessPipExecutor.OutputChunkInLines)]
[InlineData(false, ProcessesLogEventId.PipProcessError, true, 10 * SandboxedProcessPipExecutor.OutputChunkInLines, OutputReportingMode.FullOutputAlways)]
[InlineData(true, ProcessesLogEventId.PipProcessError, false, 10 * SandboxedProcessPipExecutor.OutputChunkInLines, OutputReportingMode.FullOutputAlways)]
[InlineData(true, ProcessesLogEventId.PipProcessWarning, false)]
[InlineData(false, ProcessesLogEventId.PipProcessWarning, true)]
public Task ProcessPrintPathsToLog(bool regexMatchesEverything, ProcessesLogEventId eventId, bool shouldContainLogPath, int errorMessageLength = 0, OutputReportingMode outputReportingMode = OutputReportingMode.TruncatedOutputOnError)
{
return WithCachingExecutionEnvironment(
GetFullPath(".cache"),
async env =>
{
string workingDirectory = GetFullPath("work");
AbsolutePath workingDirectoryAbsolutePath = AbsolutePath.Create(env.Context.PathTable, workingDirectory);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
if ((int)eventId == (int)ProcessesLogEventId.PipProcessError)
{
Process pip = CreateErrorProcess(
env.Context,
workingDirectoryAbsolutePath,
destinationAbsolutePath,
errorPattern: regexMatchesEverything ? ".*" : "ERROR",
errorMessageLength: errorMessageLength);
var testRunChecker = new TestRunChecker();
await testRunChecker.VerifyFailed(env, pip);
AssertErrorEventLogged(eventId, errorMessageLength > 0 && outputReportingMode == OutputReportingMode.FullOutputAlways ? 3 : 1);
}
else
{
Process pip = CreateWarningProcess(
env.Context,
workingDirectoryAbsolutePath,
destinationAbsolutePath,
regexMatchesEverything ? false : true);
var testRunChecker = new TestRunChecker();
testRunChecker.ExpectWarning();
await testRunChecker.VerifySucceeded(env, pip);
AssertWarningEventLogged(eventId);
}
string log = EventListener.GetLog();
string relatedLog = GetRelatedLog(log, eventId);
if (shouldContainLogPath)
{
XAssert.IsTrue(relatedLog.Contains(destination));
}
else
{
XAssert.IsFalse(relatedLog.Contains(destination));
}
},
null,
pathTable => GetConfiguration(pathTable, enableLazyOutputs: false, outputReportingMode: outputReportingMode));
}
private string GetRelatedLog (string log, ProcessesLogEventId eventId)
{
string start = "DX00"+ ((int)eventId).ToString();
string[] ends = { "WARNING DX", "ERROR DX", "VERBOSE DX" };
string upperCaseLog = log.ToUpper();
int startIndex = upperCaseLog.LastIndexOf(start);
if (startIndex < 0)
{
return string.Empty;
}
int endIndex = log.Length;
foreach (string end in ends)
{
int theEndIndex = upperCaseLog.IndexOf(end, startIndex);
if (theEndIndex > 0 && theEndIndex < endIndex)
{
endIndex = theEndIndex;
}
}
return log.Substring(startIndex, endIndex - startIndex);
}
        /// <summary>
        /// A pip probing files that were omitted from a sealed directory triggers file monitoring
        /// violations. With violations downgraded to warnings the pip succeeds but must not be cached,
        /// so it is marked perpetually dirty and re-runs (re-logging the violations) on the next build.
        /// </summary>
        [Trait(BuildXL.TestUtilities.Features.Feature, BuildXL.TestUtilities.Features.NonStandardOptions)]
        [FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
        public Task ProcessUncacheableDueToFileMonitoringViolationsInSealedDirectory()
        {
            return WithCachingExecutionEnvironment(
                GetFullPath(".cache"),
                // We set monitoring violations to warnings so the pip completes (though uncached).
                config: pathTable => GetConfiguration(pathTable, fileAccessIgnoreCodeCoverage: true, failUnexpectedFileAccesses: false, unexpectedFileAccessesAreErrors: false),
                act: async env =>
                {
                    string source = GetFullPath("source");
                    AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
                    string destination = GetFullPath("dest");
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);

                    // omitContents: the probed files are not part of the seal, so accessing them is a violation.
                    DirectoryArtifact directoryArtifact = SealDirectoryWithProbeTargets(env, sourceAbsolutePath, omitContents: true);
                    Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath);
                    var testRunChecker = new TestRunChecker(destination);

                    string expected = CreateDirectoryWithProbeTargets(env.Context.PathTable, sourceAbsolutePath, fileAContents: "A", fileBContents: "B2");
                    await testRunChecker.VerifySucceeded(env, pip, expected, expectMarkedPerpetuallyDirty: true);

                    // Expecting 4 warnings, of which 3 are collapsed into one due to similar file access type.
                    // Events ignore the function used for the access.
                    AssertVerboseEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccess, count: 2);
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 1);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations, count: 1);

                    // Second run: the pip was not cached, so it executes and logs the same violations again.
                    await testRunChecker.VerifySucceeded(env, pip, expected, expectMarkedPerpetuallyDirty: true);

                    // Expecting 4 warnings, of which 3 are collapsed into one due to similar file access type.
                    // Events ignore the function used for the access.
                    AssertVerboseEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccess, count: 2);
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 1);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations, count: 1);
                });
        }
        /// <summary>
        /// A pip probing files omitted from a sealed directory, with unexpected accesses treated as
        /// errors: the pip still succeeds inside PipExecutor (the failure surfaces in the post-process
        /// step), is marked perpetually dirty, and is never stored to the cache.
        /// </summary>
        [FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
        public Task ProcessFailsDueToFileMonitoringViolationsInSealedDirectory()
        {
            return WithExecutionEnvironment(
                // We set monitoring violations to errors so the pip fails due to the directory dependency issue
                config: pathTable => GetConfiguration(pathTable, fileAccessIgnoreCodeCoverage: true, failUnexpectedFileAccesses: false, unexpectedFileAccessesAreErrors: true),
                act: async env =>
                {
                    string source = GetFullPath("source");
                    AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
                    string destination = GetFullPath("dest");
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);

                    // omitContents: the probed files are not part of the seal, so accessing them is a violation.
                    DirectoryArtifact directoryArtifact = SealDirectoryWithProbeTargets(env, sourceAbsolutePath, omitContents: true);
                    Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath);
                    var testRunChecker = new TestRunChecker();

                    CreateDirectoryWithProbeTargets(env.Context.PathTable, sourceAbsolutePath, fileAContents: "A", fileBContents: "B2");

                    // Pip with file monitoring errors succeeds in PipExecutor but fails in the post-process step.
                    await testRunChecker.VerifySucceeded(env, pip, expectMarkedPerpetuallyDirty: true);

                    AssertVerboseEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccess, count: 2);
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 1);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations);

                    // Uncacheable pip due to the file monitoring errors: the second run re-executes
                    // and logs the same violations again.
                    await testRunChecker.VerifySucceeded(env, pip, expectMarkedPerpetuallyDirty: true);

                    AssertVerboseEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccess, count: 2);
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 1);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations);
                });
        }
[FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
public Task ProcessCachedWithAllowlistedFileMonitoringViolations()
{
const string Contents = "Matches!";
const string BadContents = "Anti-Matches!";
return WithCachingExecutionEnvironment(
GetFullPath(".cache"),
// We set monitoring violations to warnings so the pip completes.
config: pathTable => GetConfiguration(pathTable, fileAccessIgnoreCodeCoverage: true, failUnexpectedFileAccesses: false),
allowlistCreator: (context) =>
{
var allowlist = new FileAccessAllowlist(context);
allowlist.Add(
new ExecutablePathAllowlistEntry(
AbsolutePath.Create(context.PathTable, CmdHelper.OsShellExe),
FileAccessAllowlist.RegexWithProperties(Regex.Escape(Path.GetFileName("source"))),
allowsCaching: true,
name: "allowlist1"));
allowlist.Add(
new ExecutablePathAllowlistEntry(
AbsolutePath.Create(context.PathTable, CmdHelper.OsShellExe),
FileAccessAllowlist.RegexWithProperties(Regex.Escape(Path.GetFileName("dest"))),
allowsCaching: true,
name: "allowlist2"));
return allowlist;
},
act: async env =>
{
string source = GetFullPath("source");
AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
File.WriteAllText(destination, BadContents);
File.WriteAllText(source, Contents);
Process pip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, omitDependencies: true);
var testRunChecker = new TestRunChecker(destination);
await testRunChecker.VerifySucceeded(env, pip, Contents);
// Expecting 3 events, of which 2 are collapsed into one due to similar file access type.
// Events ignore the function used for the access.
AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedCacheable, count: 2);
await testRunChecker.VerifyUpToDate(env, pip, Contents);
AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedCacheable, count: 0);
});
}
        /// <summary>
        /// Uses mock implementations of the interfaces needed by the EngineCache class to test the case
        /// where another party has already published cache content for the same strong fingerprint.
        /// BuildXL must detect that its locally produced content was replaced (cache convergence) and
        /// use the converged content for subsequent pip executions.
        /// </summary>
        [FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
        public Task ProcessCachedForCacheConvergence()
        {
            string localContents;
            IgnoreWarnings();
            return WithCachingExecutionEnvironmentForCacheConvergence(
                GetFullPath(".cache"),
                // We set monitoring violations to warnings so the pip completes.
                config: pathTable => GetConfigurationForCacheConvergence(pathTable, fileAccessIgnoreCodeCoverage: true),
                act: async env =>
                {
                    string source = GetFullPath("source");
                    AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
                    string destination = GetFullPath("foobar.txt");
                    List<AbsolutePath> destinationAbsolutePath = new List<AbsolutePath>() { AbsolutePath.Create(env.Context.PathTable, destination) };

                    // The echo command emits a time/random value, so each execution writes different content
                    // while keeping the pip fingerprint stable.
                    Process pip1 = CreateEchoProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, 123, OperatingSystemHelper.IsUnixOS ? "/usr/bin/jot -r 1 1 65000 > foobar.txt" : "%time% > foobar.txt");
                    var testRunChecker = new TestRunChecker(destination);
                    await VerifyPipResult(PipResultStatus.Succeeded, env, pip1, false);
                    XAssert.AreEqual(1, env.OutputFilesProduced, "produced count");
                    XAssert.AreEqual(0, env.OutputFilesUpToDate, "up to date count");
                    XAssert.AreEqual(0, env.OutputFilesDeployedFromCache, "deployed from cache count");

                    string actual = File.ReadAllText(destination);
                    localContents = actual;

                    // Rely on timing so that pip1 and pip2 achieve different results without changing the pip's fingerprint.
                    await Task.Delay(TimeSpan.FromSeconds(1));

                    Process pip2 = CreateEchoProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, 124, OperatingSystemHelper.IsUnixOS ? "/usr/bin/jot -r 1 1 65000 > foobar.txt" : "%time% > foobar.txt");

                    // Ensure pip has cache miss so that it will run and subsequently do the convergence check
                    env.InjectedCacheMissPips.Add(pip2.PipId);

                    var cache = env.Cache;
                    var twoPhaseStore = (TestPipExecutorTwoPhaseFingerprintStore)cache.TwoPhaseFingerprintStore;
                    var priorPublishTries = twoPhaseStore.M_publishTries;

                    await VerifyPipResult(PipResultStatus.DeployedFromCache, env, pip2, false, checkNoProcessExecutionOnCacheHit: false, expectedExecutionLevel: PipExecutionLevel.Executed);

                    // Ensure a publish was attempted so we know there was a cache miss and the pip reran
                    XAssert.AreEqual(priorPublishTries + 1, twoPhaseStore.M_publishTries);

                    XAssert.AreEqual(1, env.OutputFilesProduced, "produced count");
                    XAssert.AreEqual(0, env.OutputFilesUpToDate, "up to date count");
                    XAssert.AreEqual(1, env.OutputFilesDeployedFromCache, "deployed from cache count");

                    // pip2's locally produced content was discarded in favor of pip1's converged content.
                    actual = File.ReadAllText(destination);
                    XAssert.AreEqual(localContents, actual);

                    // Make sure convergence isn't classified as a remote hit. It might be converging to a remote, but that
                    // would be making an assumption. This counter is used for logging and it is very confusing to users
                    // to give a false positive on a remote hit when there was no remote cache. It is better to error on
                    // the side of undercounting remote hits than overcounting them.
                    long remoteHits = env.Counters.GetCounterValue(PipExecutorCounter.RemoteCacheHitsForProcessPipDescriptorAndContent);
                    XAssert.AreEqual(0, remoteHits);
                });
        }
[FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
public Task ProcessWithCacheMaintainsCase()
{
const string Contents = "Matches!";
const string BadContents = "Anti-Matches!";
return WithCachingExecutionEnvironment(
GetFullPath(".cache"),
async env =>
{
string source = GetFullPath("source");
AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
string destination = GetFullPath("dest");
AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
File.WriteAllText(destination, BadContents);
File.WriteAllText(source, Contents);
var destinationFileNameInPathTable = destinationAbsolutePath.GetName(env.Context.PathTable).ToString(env.Context.StringTable);
string expectedCaseDestinationFileName = "DESt";
// Ensure that file name in path table does NOT match the actual file name to be created by the process
// This ensures that unless file name casing is appropriately stored and used in the cache
// the file name will not match after the deploy from cache step below.
XAssert.AreNotEqual(expectedCaseDestinationFileName, destinationFileNameInPathTable);
Process pip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, destinationFileName: expectedCaseDestinationFileName);
var testRunChecker = new TestRunChecker(destination);
await testRunChecker.VerifySucceeded(env, pip, Contents);
await testRunChecker.VerifyUpToDate(env, pip, Contents);
// Ensure copy pip is creating file name with appropriate case
XAssert.AreEqual(expectedCaseDestinationFileName, FileUtilities.GetFileName(destination).Result);
File.Delete(destination);
await testRunChecker.VerifyDeployedFromCache(env, pip, Contents);
// Ensure process gets appropriate case when deployed from cache
XAssert.AreEqual(expectedCaseDestinationFileName, FileUtilities.GetFileName(destination).Result);
});
}
        /// <summary>
        /// When at least one file access is allowlisted as non-cacheable, the pip may still succeed but
        /// must never be stored to the cache: every run re-executes, produces fresh outputs, and is
        /// marked perpetually dirty.
        /// </summary>
        [FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
        public Task ProcessNotCachedWithAllowlistedFileMonitoringViolations()
        {
            const string Contents = "Matches!";
            const string BadContents = "Anti-Matches!";

            return WithCachingExecutionEnvironment(
                GetFullPath(".cache"),
                // We set monitoring violations to warnings so the pip completes.
                config: pathTable => GetConfiguration(pathTable, fileAccessIgnoreCodeCoverage: true, failUnexpectedFileAccesses: false),
                allowlistCreator: (context) =>
                {
                    var allowlist = new FileAccessAllowlist(context);

                    // 'source' is allowlisted but flagged non-cacheable; this alone makes the pip uncacheable.
                    allowlist.Add(
                        new ExecutablePathAllowlistEntry(
                            AbsolutePath.Create(context.PathTable, CmdHelper.OsShellExe),
                            FileAccessAllowlist.RegexWithProperties(Regex.Escape(Path.GetFileName("source"))),
                            allowsCaching: false,
                            name: "allowlist1"));
                    allowlist.Add(
                        new ExecutablePathAllowlistEntry(
                            AbsolutePath.Create(context.PathTable, CmdHelper.OsShellExe),
                            FileAccessAllowlist.RegexWithProperties(Regex.Escape(Path.GetFileName("dest"))),
                            allowsCaching: true,
                            name: "allowlist2"));

                    return allowlist;
                },
                act: async env =>
                {
                    string source = GetFullPath("source");
                    AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
                    string destination = GetFullPath("dest");
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
                    File.WriteAllText(destination, BadContents);
                    File.WriteAllText(source, Contents);

                    Process pip = CreateCopyProcess(env.Context, sourceAbsolutePath, destinationAbsolutePath, omitDependencies: true);

                    // Two builds: both must execute (OutputFilesProduced grows each time, nothing comes from cache).
                    for (int i = 1; i <= 2; i++)
                    {
                        var perf = (ProcessPipExecutionPerformance)await VerifyPipResult(PipResultStatus.Succeeded, env, pip, expectMarkedPerpetuallyDirty: true);
                        XAssert.AreEqual(i, env.OutputFilesProduced, "produced count");
                        XAssert.AreEqual(0, env.OutputFilesUpToDate, "up to date count");
                        XAssert.AreEqual(0, env.OutputFilesDeployedFromCache, "deployed from cache count");

                        // Expecting 3 events, of which 2 are collapsed into one due to similar file access type.
                        // Events ignore the function used for the access.
                        AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedNonCacheable, count: 2);
                        AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations);

                        // Expecting 3 violations, of which 2 are collapsed into one due to similar file access type.
                        // Events ignore the function used for the access.
                        XAssert.AreEqual(2, perf.FileMonitoringViolations.NumFileAccessesAllowlistedButNotCacheable);

                        string actual = File.ReadAllText(destination);
                        XAssert.AreEqual(Contents, actual);
                    }
                });
        }
        /// <summary>
        /// Accesses into a sealed directory that are covered by a non-cacheable allowlist entry do not
        /// fail the pip, but they do make it uncacheable: it succeeds, is marked perpetually dirty, and
        /// re-executes on every build.
        /// </summary>
        [FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
        public Task ProcessNotCachedWithAllowlistedFileMonitoringViolationsInSealedDirectory()
        {
            return WithCachingExecutionEnvironment(
                GetFullPath(".cache"),
                // We set monitoring violations to warnings so the pip completes.
                config: pathTable => GetConfiguration(pathTable, fileAccessIgnoreCodeCoverage: true, failUnexpectedFileAccesses: false),
                allowlistCreator: (context) =>
                {
                    var allowlist = new FileAccessAllowlist(context);

                    // File 'a' is allowlisted but flagged non-cacheable, making the pip uncacheable.
                    allowlist.Add(
                        new ExecutablePathAllowlistEntry(
                            AbsolutePath.Create(context.PathTable, CmdHelper.OsShellExe),
                            FileAccessAllowlist.RegexWithProperties(Regex.Escape(Path.GetFileName("a"))),
                            allowsCaching: false,
                            name: "allowlist"));

                    return allowlist;
                },
                act: async env =>
                {
                    string source = GetFullPath("source");
                    AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
                    string destination = GetFullPath("dest");
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);

                    // We omit a and b from the seal; instead they should be handled by the allowlist.
                    DirectoryArtifact directoryArtifact = SealDirectoryWithProbeTargets(env, sourceAbsolutePath, omitContents: true);
                    Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath);
                    string expected = CreateDirectoryWithProbeTargets(env.Context.PathTable, sourceAbsolutePath, fileAContents: "A", fileBContents: "B2");

                    var testRunChecker = new TestRunChecker(destination);

                    await testRunChecker.VerifySucceeded(env, pip, expected, expectMarkedPerpetuallyDirty: true);

                    // Expecting 4 events, of which 3 are collapsed into one due to similar file access type.
                    // Events ignore the function used for the access.
                    AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedNonCacheable, count: 2);

                    // The sealed-directory specific event is only emitted when there are non-allowlisted violations.
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 0);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations, count: 1);

                    // Second build: the uncacheable pip re-executes and logs the same events again.
                    await testRunChecker.VerifySucceeded(env, pip, expected, expectMarkedPerpetuallyDirty: true);

                    // Expecting 4 events, of which 3 are collapsed into one due to similar file access type.
                    // Events ignore the function used for the access.
                    AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedNonCacheable, count: 2);
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 0);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations, count: 1);
                });
        }
        /// <summary>
        /// Accesses into a sealed directory that are covered by a cacheable allowlist entry neither fail
        /// the pip nor prevent caching: the first run succeeds and the second run is fully up to date.
        /// </summary>
        [FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
        public Task ProcessCachedWithAllowlistedFileMonitoringViolationsInSealedDirectory()
        {
            return WithCachingExecutionEnvironment(
                GetFullPath(".cache"),
                // We set monitoring violations to warnings so the pip completes.
                config: pathTable => GetConfiguration(pathTable, fileAccessIgnoreCodeCoverage: true, failUnexpectedFileAccesses: false),
                allowlistCreator: (context) =>
                {
                    var allowlist = new FileAccessAllowlist(context);

                    // File 'a' is allowlisted as cacheable, so accessing it does not prevent caching.
                    allowlist.Add(
                        new ExecutablePathAllowlistEntry(
                            AbsolutePath.Create(context.PathTable, CmdHelper.OsShellExe),
                            FileAccessAllowlist.RegexWithProperties(Regex.Escape(Path.GetFileName("a"))),
                            allowsCaching: true,
                            name: "allowlist"));

                    return allowlist;
                },
                act: async env =>
                {
                    string source = GetFullPath("source");
                    AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);
                    string destination = GetFullPath("dest");
                    AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);

                    // We omit a and b from the seal; instead 'a' should be handled by the allowlist and so 'b' should not be accessed.
                    // (logic is 'if a is absent, poke b')
                    DirectoryArtifact directoryArtifact = SealDirectoryWithProbeTargets(env, sourceAbsolutePath, omitContents: true);
                    Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath);
                    var testRunChecker = new TestRunChecker(destination);

                    string expected = CreateDirectoryWithProbeTargets(env.Context.PathTable, sourceAbsolutePath, fileAContents: "A", fileBContents: "B2");

                    await testRunChecker.VerifySucceeded(env, pip, expected);

                    // Expecting 4 events, of which 3 are collapsed into one due to similar file access type.
                    // Events ignore the function used for the access.
                    AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedCacheable, count: 2);

                    // The sealed-directory specific event is only emitted when there are non-allowlisted violations.
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 0);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations, count: 0);

                    // Cache hit: no access events should be replayed.
                    await testRunChecker.VerifyUpToDate(env, pip, expected);

                    AssertInformationalEventLogged(ProcessesLogEventId.PipProcessDisallowedFileAccessAllowlistedCacheable, count: 0);
                    AssertVerboseEventLogged(LogEventId.DisallowedFileAccessInSealedDirectory, count: 0);
                    AssertWarningEventLogged(LogEventId.ProcessNotStoredToCacheDueToFileMonitoringViolations, count: 0);
                });
        }
/// <summary>
/// Verifies that a path-independent pip cached under mount ROOT=originalDir is a cache hit
/// when ROOT is re-pointed to rerootDir with identical content (mount tokenization trims
/// the root from fingerprints and cache descriptors).
/// </summary>
[Fact(Skip = "Currently, mount tokenization is not supported")]
public async Task CachedProcessAfterReroot()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";

    string cacheDir = GetFullPath(".cache");
    string originalDir = GetFullPath("original");
    string rerootDir = GetFullPath("reroot");
    Directory.CreateDirectory(originalDir);
    Directory.CreateDirectory(rerootDir);

    // ROOT => original
    await WithCachingExecutionEnvironment(
        cacheDir,
        async env =>
        {
            string originalSource = GetFullPath(@"original\source");
            AbsolutePath originalSourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, originalSource);

            string originalDestination = GetFullPath(@"original\dest");
            AbsolutePath originalDestinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, originalDestination);

            // Seed the destination with wrong content so a successful copy is observable.
            File.WriteAllText(originalDestination, BadContents);
            File.WriteAllText(originalSource, Contents);

            Process originalPip = CreateCopyProcess(env.Context, originalSourceAbsolutePath, originalDestinationAbsolutePath, options: Process.Options.ProducesPathIndependentOutputs);

            // First run: a real execution that populates the cache.
            await VerifyPipResult(PipResultStatus.Succeeded, env, originalPip);
            XAssert.AreEqual(1, env.OutputFilesProduced, "produced count");
            XAssert.AreEqual(0, env.OutputFilesUpToDate, "up to date count");
            XAssert.AreEqual(0, env.OutputFilesDeployedFromCache, "deployed from cache count");

            string actual = File.ReadAllText(originalDestination);
            XAssert.AreEqual(Contents, actual);
        },
        createMountExpander: pathTable =>
        {
            var mounts = new MountPathExpander(pathTable);
            mounts.Add(
                pathTable,
                new Mount()
                {
                    Name = PathAtom.Create(pathTable.StringTable, "ROOT"),
                    Path = AbsolutePath.Create(pathTable, originalDir)
                });

            return mounts;
        });

    // ROOT => reroot (but the same content is present, so we should get a cache hit due to root trimming in fingerprinting and in the cache descriptor).
    await WithCachingExecutionEnvironment(
        cacheDir,
        async env =>
        {
            string rerootedSource = GetFullPath(@"reroot\source");
            AbsolutePath rerootedSourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, rerootedSource);

            string rerootedDestination = GetFullPath(@"reroot\dest");
            AbsolutePath rerootedDestinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, rerootedDestination);

            Process rerootedPip = CreateCopyProcess(env.Context, rerootedSourceAbsolutePath, rerootedDestinationAbsolutePath, options: Process.Options.ProducesPathIndependentOutputs);

            var testRunChecker = new TestRunChecker(rerootedDestination);

            // Both the source and destination should be the same (see above after the pip runs); just the root has moved.
            File.WriteAllText(rerootedDestination, Contents);
            File.WriteAllText(rerootedSource, Contents);

            // First let's ensure that UpToDate-ness is possible if somehow we already *knew* we had the right content.
            // (without this call we'd pessimistically replace the destination rather than hashing it).
            await env.FileContentTable.GetAndRecordContentHashAsync(rerootedDestination);

            await testRunChecker.VerifyUpToDate(env, rerootedPip, Contents);

            // After deleting the destination, a cache deployment (not a re-execution) must restore it.
            File.Delete(rerootedDestination);

            await testRunChecker.VerifyDeployedFromCache(env, rerootedPip, Contents);
        },
        createMountExpander: pathTable =>
        {
            var mounts = new MountPathExpander(pathTable);
            mounts.Add(
                pathTable,
                new Mount()
                {
                    Name = PathAtom.Create(pathTable.StringTable, "ROOT"),
                    Path = AbsolutePath.Create(pathTable, rerootDir)
                });

            return mounts;
        });
}
/// <summary>
/// Regression test for Bug #1021066: directory-enumeration observed inputs under a tracked
/// source mount must correctly invalidate/preserve cache hits when
/// TreatDirectoryAsAbsentFileOnHashingInputContent is enabled.
/// </summary>
[FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
public Task ProcessWithDirectoryEnumeration_Bug1021066()
{
    string sourceDir = GetFullPath("inc");
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            // Record the execution log so observed enumerations can be inspected per run.
            env.RecordExecution();
            var mutableConfiguration = (ConfigurationImpl)env.Configuration;

            // Bug #1021066 only reproduces if this flag is set
            mutableConfiguration.Schedule.TreatDirectoryAsAbsentFileOnHashingInputContent = true;

            var pathTable = env.Context.PathTable;
            PathTable.DebugPathTable = pathTable;
            AbsolutePath sourceDirAbsolutePath = AbsolutePath.Create(pathTable, sourceDir);

            string destination = GetFullPath("out");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);

            DirectoryArtifact directoryArtifact = SealSourceDirectoryWithProbeTargets(env, sourceDirAbsolutePath, true);

            // The pip recursively enumerates the sealed source directory and cats every file into the output.
            Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath,
                script: OperatingSystemHelper.IsUnixOS ? "/usr/bin/find . -type f -exec /bin/cat {} + > /dev/null" : "echo off & for /f %i IN ('dir *.* /b /s') do type %i");

            var testRunChecker = new TestRunChecker(destination);

            // 'fs' mirrors the on-disk state and is used to compute the expected output.
            Dictionary<string, string> fs = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

            // The following cases exercise runtime observation and usage thereof, such as VerifyInputAssertions.
            // Note that the input directory is not explicitly sealed (no scheduler); the pip executor used here returns
            // 'sealed' hashes for files so long as they exist.
            WriteFile(fs, pathTable, sourceDirAbsolutePath, file: OperatingSystemHelper.IsUnixOS ? "dir/A" : @"dir\A", contents: "A0");
            await testRunChecker.VerifySucceeded(env, pip, GetExpected(fs));
            VerifyExecutionObservedEnumerationsAndClear(env, pathTable, sourceDirAbsolutePath, ".", "dir");

            // Add new file in nested invalidates cache
            WriteFile(fs, pathTable, sourceDirAbsolutePath, file: OperatingSystemHelper.IsUnixOS ? "dir/B" : @"dir\B", contents: "B0");
            await testRunChecker.VerifySucceeded(env, pip, GetExpected(fs));
            VerifyExecutionObservedEnumerationsAndClear(env, pathTable, sourceDirAbsolutePath, ".", "dir");

            // Rebuild without changes does not invalidate cache
            await testRunChecker.VerifyUpToDate(env, pip, GetExpected(fs));

            // Touching file does not invalidate cache
            // NOTE(review): unlike the sibling calls this one always uses @"dir\B" with no Unix
            // branch; harmless here because the test is Windows-only, but confirm if ever ported.
            WriteFile(fs, pathTable, sourceDirAbsolutePath, file: @"dir\B", contents: "B0");
            await testRunChecker.VerifyUpToDate(env, pip, GetExpected(fs));

            // Change file invalidates cache
            WriteFile(fs, pathTable, sourceDirAbsolutePath, file: OperatingSystemHelper.IsUnixOS ? "dir/B" : @"dir\B", contents: "B1");
            await testRunChecker.VerifySucceeded(env, pip, GetExpected(fs));
            VerifyExecutionObservedEnumerationsAndClear(env, pathTable, sourceDirAbsolutePath, ".", "dir");

            // Add new file under root directory
            WriteFile(fs, pathTable, sourceDirAbsolutePath, file: @"C", contents: "C0");
            await testRunChecker.VerifySucceeded(env, pip, GetExpected(fs));
            VerifyExecutionObservedEnumerationsAndClear(env, pathTable, sourceDirAbsolutePath, ".", "dir");

            // Add new directory under root directory
            WriteFile(fs, pathTable, sourceDirAbsolutePath, file: OperatingSystemHelper.IsUnixOS ? "dir2/B" : @"dir2\B", contents: "B0");
            await testRunChecker.VerifySucceeded(env, pip, GetExpected(fs));
            VerifyExecutionObservedEnumerationsAndClear(env, pathTable, sourceDirAbsolutePath, ".", "dir", "dir2");
        },
        createMountExpander: pathTable =>
        {
            var mounts = new MountPathExpander(pathTable);

            // A readable, change-tracked source mount is required for enumeration tracking.
            mounts.Add(
                pathTable,
                new Mount()
                {
                    Name = PathAtom.Create(pathTable.StringTable, "ROOT"),
                    Path = AbsolutePath.Create(pathTable, sourceDir),
                    IsReadable = true,
                    TrackSourceFileChanges = true
                });

            return mounts;
        });
}
/// <summary>
/// Asserts that the single Execution-kind fingerprint computation recorded since the last
/// clear observed a directory enumeration for each of the given subdirectories of
/// <paramref name="directory"/> ("." denotes the directory itself), then clears the recorder
/// so the next run can be validated in isolation.
/// </summary>
private void VerifyExecutionObservedEnumerationsAndClear(
    DummyPipExecutionEnvironment env,
    PathTable pathTable,
    AbsolutePath directory,
    params string[] relativeSubDirectories)
{
    // Translate each relative name into an absolute path; "." maps to the directory itself.
    HashSet<AbsolutePath> expectedEnumerations = new HashSet<AbsolutePath>(
        relativeSubDirectories.Select(sd => sd == "." ? directory : directory.Combine(pathTable, RelativePath.Create(pathTable.StringTable, sd))));

    // There should be exactly one Execution-kind fingerprint computation since the last clear.
    var fingerprintData = env.ExecutionLogRecorder
        .GetEvents<ProcessFingerprintComputationEventData>()
        .Where(pf => pf.Kind == FingerprintComputationKind.Execution)
        .SingleOrDefault();

    // Tick off every enumeration actually observed; anything left over was expected but not seen.
    foreach (var observedInput in fingerprintData.StrongFingerprintComputations[0].ObservedInputs)
    {
        if (observedInput.Type == ObservedInputType.DirectoryEnumeration)
        {
            expectedEnumerations.Remove(observedInput.Path);
        }
    }

    Assert.Empty(expectedEnumerations);

    env.ExecutionLogRecorder.Clear();
}
/// <summary>
/// Builds the expected pip output by concatenating the contents of <paramref name="fs"/>
/// in case-insensitive path order, followed by a trailing newline.
/// </summary>
private string GetExpected(Dictionary<string, string> fs)
{
    // Map path separators to char.MinValue before comparing so that separators sort
    // before every printable character, matching the tool's enumeration order.
    IEnumerable<string> orderedContents = fs
        .OrderBy(entry => entry.Key.Replace(Path.DirectorySeparatorChar, char.MinValue), StringComparer.OrdinalIgnoreCase)
        .Select(entry => entry.Value);

    var builder = new StringBuilder();
    foreach (string content in orderedContents)
    {
        builder.Append(content);
    }

    return builder.Append(Environment.NewLine).ToString();
}
/// <summary>
/// Writes (or, when <paramref name="contents"/> is null, deletes) <paramref name="file"/>
/// under <paramref name="directory"/>, mirroring the change in the expected-state map
/// <paramref name="fs"/> used by <c>GetExpected</c>.
/// </summary>
private void WriteFile(
    Dictionary<string, string> fs,
    PathTable pathTable,
    AbsolutePath directory,
    string file,
    string contents)
{
    string fullPath = Path.Combine(directory.ToString(pathTable), file);
    Directory.CreateDirectory(Path.GetDirectoryName(fullPath));

    if (contents != null)
    {
        // Keep disk and the expected map in sync.
        fs[file] = contents;
        File.WriteAllText(fullPath, contents);
        return;
    }

    // Null contents means "remove the file".
    fs.Remove(file);
    File.Delete(fullPath);
}
/// <summary>
/// Verifies caching behavior for a pip that probes files inside a sealed directory:
/// cache hits and misses must follow only the files the pip actually observed
/// ('a' if present, else 'b'), not everything in the seal.
/// </summary>
[Fact]
public Task ProcessWithDirectoryInput()
{
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string sourceDir = GetFullPath("inc");
            AbsolutePath sourceDirAbsolutePath = AbsolutePath.Create(pathTable, sourceDir);

            string destination = GetFullPath("out");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);

            DirectoryArtifact directoryArtifact = SealDirectoryWithProbeTargets(env, sourceDirAbsolutePath);
            Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath);
            var testRunChecker = new TestRunChecker(destination);

            // The following cases exercise runtime observation and usage thereof, such as VerifyInputAssertions.
            // Note that the input directory is not explicitly sealed (no scheduler); the pip executor used here returns
            // 'sealed' hashes for files so long as they exist.
            string expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A0", fileBContents: "B0");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // 'a' changed, so the pip re-runs.
            expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A", fileBContents: "B");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // Up-to-date since B wasn't read last time.
            expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A", fileBContents: "B2");
            await testRunChecker.VerifyUpToDate(env, pip, expected);

            // Runs since A was deleted.
            expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // Runs since B was changed (to a value used before, but not observed)
            expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // FixPoint when using B
            expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2");
            await testRunChecker.VerifyUpToDate(env, pip, expected);

            // Verify deployed from cache since it matches the first run
            expected = CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A0", fileBContents: "B0");
            await testRunChecker.VerifyDeployedFromCache(env, pip, expected);
        });
}
/// <summary>
/// Source-directory probing where the seal covers only the top directory.
/// (Method name kept as-is — including the "Director" spelling — since tests are
/// referenced by name.)
/// </summary>
[FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
public Task ProcessWithSourceDirectorTopDirectoryOnlyDirectoriesInput() => ProcessWithSourceDirectoryHelper(allDirectories: false);
/// <summary>
/// Source-directory probing where the seal covers the directory recursively.
/// </summary>
[FactIfSupported(requiresWindowsBasedOperatingSystem: true)]
public Task ProcessWithSourceDirectoryAllDirectoriesInput() => ProcessWithSourceDirectoryHelper(allDirectories: true);
/// <summary>
/// Shared body for the source-directory probing tests. The pip reads 'a' if present else 'b',
/// and (when <paramref name="allDirectories"/> is true) also reads CFolder\c; cache hits must
/// track exactly those observations. The final step corrupts 'echo' inside the seal to prove
/// probed-but-unexpected files also invalidate the cache.
/// </summary>
public Task ProcessWithSourceDirectoryHelper(bool allDirectories)
{
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string sourceDir = GetFullPath("inc");
            AbsolutePath sourceDirAbsolutePath = AbsolutePath.Create(pathTable, sourceDir);

            string destination = GetFullPath("out");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);

            var dummyContents = new List<AbsolutePath>
                                {
                                    sourceDirAbsolutePath.Combine(pathTable, "a"),
                                    sourceDirAbsolutePath.Combine(pathTable, "b"),
                                    sourceDirAbsolutePath.Combine(pathTable, "echo"),
                                };
            if (allDirectories)
            {
                // The nested file only participates when the seal is recursive.
                dummyContents.Add(sourceDirAbsolutePath.Combine(pathTable, "CFolder").Combine(pathTable, "c"));
            }

            DirectoryArtifact directoryArtifact = SealSourceDirectoryWithProbeTargets(env, sourceDirAbsolutePath, allDirectories, dummyContents.ToArray());

            // Script: cat 'a' if it exists, else 'b'; additionally cat CFolder/c when recursive.
            var script = allDirectories ?
                OperatingSystemHelper.IsUnixOS ?
                    "if [ -f a ]; then /bin/cat a; else /bin/cat b; fi; if [ -f CFolder/c ]; then /bin/cat CFolder/c; fi;" :
                    "( if exist a (type a) else (type b) ) & ( if exist CFolder\\c (type CFolder\\c) )" :
                null;

            Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath, script);

            var testRunChecker = new TestRunChecker(destination);
            testRunChecker.ExpectedContentsSuffix = Environment.NewLine;

            // The following cases exercise runtime observation and usage thereof, such as VerifyInputAssertions.
            // Note that the input directory is not explicitly sealed (no scheduler); the pip executor used here returns
            // 'sealed' hashes for files so long as they exist.
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A", fileBContents: "B", fileCContents: "C");
            await testRunChecker.VerifySucceeded(env, pip, allDirectories ? "AC" : "A");

            // Up-to-date since B wasn't read last time.
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A", fileBContents: "B2", fileCContents: "C");
            await testRunChecker.VerifyUpToDate(env, pip, allDirectories ? "AC" : "A");

            // Runs since A was deleted.
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B", fileCContents: "C");
            await testRunChecker.VerifySucceeded(env, pip, allDirectories ? "BC" : "B");

            // Runs since C was changed if we are testing all directories
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B", fileCContents: "C2");

            if (allDirectories)
            {
                await testRunChecker.VerifySucceeded(env, pip, "BC2");
            }
            else
            {
                await testRunChecker.VerifyUpToDate(env, pip, "B");
            }

            // Runs since B was changed (to a value used before, but not observed and C is back)
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2", fileCContents: "C2");
            await testRunChecker.VerifySucceeded(env, pip, allDirectories ? "B2C2" : "B2");

            // Runs since C was changed when doing all directories
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2", fileCContents: "C");

            if (allDirectories)
            {
                await testRunChecker.VerifySucceeded(env, pip, "B2C");
            }
            else
            {
                await testRunChecker.VerifyUpToDate(env, pip, "B2");
            }

            // Fixpoint when using B
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2", fileCContents: "C");
            await testRunChecker.VerifyUpToDate(env, pip, allDirectories ? "B2C" : "B2");

            // Rerun (yet fail) because echo has changed which is probed under the sealed directory
            CreateDirectoryWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2", fileCContents: "C");
            File.WriteAllText(sourceDirAbsolutePath.Combine(pathTable, "echo").ToString(pathTable), "FAIL ECHO INVOCATION");

            await testRunChecker.VerifyFailed(env, pip, allDirectories ? "B2C" : "B2");
            SetExpectedFailures(1, 0, "'echo.' is not recognized as an internal or external command");
        });
}
/// <summary>
/// Tests that we can have a partially-sealed directory input for D\A and D\B while also
/// writing D\C (write access to C should not be denied just because A and B are sealed).
/// </summary>
[Fact]
public Task ProcessWithOutputInsideDirectoryInput()
{
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            PathTable pathTable = env.Context.PathTable;

            // The probe targets (a, b) and the output (c) share the same containing directory.
            string containingDir = GetFullPath("out");
            AbsolutePath containingDirPath = AbsolutePath.Create(pathTable, containingDir);
            string outputFile = Path.Combine(containingDir, "c");
            AbsolutePath outputFilePath = AbsolutePath.Create(pathTable, outputFile);

            // Partially seal the directory over a and b only.
            DirectoryArtifact partialSeal = SealDirectoryWithProbeTargets(env, containingDirPath);
            Process pip = CreateDirectoryProbingProcess(env.Context, partialSeal, outputFilePath);

            var testRunChecker = new TestRunChecker(outputFile);
            string expected = CreateDirectoryWithProbeTargets(pathTable, containingDirPath, fileAContents: "A", fileBContents: "B");

            // Writing 'c' next to the sealed inputs must succeed.
            await testRunChecker.VerifySucceeded(env, pip, expected);
        });
}
/// <summary>
/// Verifies that a pip with a shared opaque directory output runs, is marked perpetually
/// dirty, and is still considered up-to-date on re-run.
/// </summary>
[Fact]
public Task ProcessWithSharedOpaqueDirectoryOutputs()
{
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string dir = GetFullPath("out");
            AbsolutePath dirAbsolutePath = AbsolutePath.Create(pathTable, dir);
            string destination = Path.Combine(dir, "c");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);

            DirectoryArtifact directoryArtifact = SealDirectoryWithProbeTargets(env, dirAbsolutePath);

            // Construct a shared opaque output directory artifact (non-zero partial seal id + isSharedOpaque).
            AbsolutePath sodOutputPath = AbsolutePath.Create(pathTable, Path.Combine(dir, "sod"));
            DirectoryArtifact sodOutput = new DirectoryArtifact(sodOutputPath, partialSealId: 1, isSharedOpaque: true);

            Process pip = CreateDirectoryProbingProcess(env.Context, directoryArtifact, destinationAbsolutePath, directoryOutput: sodOutput);
            XAssert.IsTrue(pip.HasSharedOpaqueDirectoryOutputs);

            var testRunChecker = new TestRunChecker(destination);
            string expected = CreateDirectoryWithProbeTargets(env.Context.PathTable, dirAbsolutePath, fileAContents: "A", fileBContents: "B");

            // Shared opaques force the pip to be perpetually dirty in both runs.
            // NOTE(review): the two calls below spell the named argument differently
            // ("Perpetually" vs "Perpertually"); presumably VerifySucceeded/VerifyUpToDate
            // declare differently-spelled parameters — confirm against TestRunChecker.
            await testRunChecker.VerifySucceeded(env, pip, expected, expectMarkedPerpetuallyDirty: true);

            await testRunChecker.VerifyUpToDate(env, pip, expected, expectMarkedPerpertuallyDirty: true);
        });
}
/// <summary>
/// Same probing scenario as <see cref="ProcessWithDirectoryInput"/>, but with the probe
/// targets split across two separately sealed subdirectories.
/// </summary>
[Fact]
public Task ProcessWithMultipleDirectoryInputs()
{
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            var pathTable = env.Context.PathTable;
            string sourceDir = GetFullPath("inc");
            AbsolutePath sourceDirAbsolutePath = AbsolutePath.Create(pathTable, sourceDir);

            string destination = GetFullPath("out");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(pathTable, destination);

            // Two seals, one per subdirectory, consumed by a single pip.
            Tuple<DirectoryArtifact, DirectoryArtifact> subdirectories = SealMultipleDirectoriesWithProbeTargets(env, sourceDirAbsolutePath);
            Process pip = CreateMultiDirectoryProbingProcess(env.Context, sourceDirAbsolutePath, subdirectories, destinationAbsolutePath);
            var testRunChecker = new TestRunChecker(destination);

            // The following cases exercise runtime observation and usage thereof, such as VerifyInputAssertions.
            // Note that the input directory is not explicitly sealed (no scheduler); the pip executor used here returns
            // 'sealed' hashes for files so long as they exist.
            string expected = CreateMultipleDirectoriesWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A", fileBContents: "B");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // Up-to-date since B wasn't read last time.
            expected = CreateMultipleDirectoriesWithProbeTargets(pathTable, sourceDirAbsolutePath, fileAContents: "A", fileBContents: "B2");
            await testRunChecker.VerifyUpToDate(env, pip, expected);

            // Runs since A was deleted.
            expected = CreateMultipleDirectoriesWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // Runs since B was changed (to a value used before, but not observed)
            expected = CreateMultipleDirectoriesWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2");
            await testRunChecker.VerifySucceeded(env, pip, expected);

            // FixPoint when using B
            expected = CreateMultipleDirectoriesWithProbeTargets(pathTable, sourceDirAbsolutePath, fileBContents: "B2");
            await testRunChecker.VerifyUpToDate(env, pip, expected);
        });
}
/// <summary>
/// Verifies that a pip's declared temp directory is cleaned before the pip runs: a file
/// left over from a "previous run" must not be visible to the process.
/// </summary>
[Fact]
public Task ProcessCleansTempDirBeforeRunning()
{
    return WithExecutionEnvironment(
        async env =>
        {
            // Create a temp directory with a file in it from a previous run
            string tempDir = GetFullPath("temp");
            AbsolutePath tempPath = AbsolutePath.Create(env.Context.PathTable, tempDir);
            Directory.CreateDirectory(tempDir);
            string oldFile = Path.Combine(tempDir, "oldfile.txt");
            AbsolutePath oldFilePath = AbsolutePath.Create(env.Context.PathTable, oldFile);
            File.WriteAllText(oldFile, "asdf");

            // Run a pip that checks whether the temp file exists or not
            var pipDataBuilder = new PipDataBuilder(env.Context.StringTable);

            // Shell flags differ per OS: sh -c vs cmd /d /c.
            if (OperatingSystemHelper.IsUnixOS)
            {
                pipDataBuilder.Add("-c");
            }
            else
            {
                pipDataBuilder.Add("/d");
                pipDataBuilder.Add("/c");
            }

            using (pipDataBuilder.StartFragment(PipDataFragmentEscaping.CRuntimeArgumentRules, " "))
            {
                // Prints "exists" or "not exist" depending on whether the leftover file survived.
                pipDataBuilder.Add(OperatingSystemHelper.IsUnixOS ? "if [ -f " + oldFile + " ]; then echo exists; else echo not exist; fi;" : "if exist \"" + oldFile + "\" (echo exists) else (echo not exist)");
            }

            // Redirect standard out to a file
            string stdOut = GetFullPath("stdout.txt");
            AbsolutePath stdOutPath = AbsolutePath.Create(env.Context.PathTable, stdOut);
            var standardOut = FileArtifact.CreateSourceFile(stdOutPath).CreateNextWrittenVersion();

            FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(env.Context.PathTable, CmdHelper.OsShellExe));
            Process p =
                AssignFakePipId(new Process(
                    executable: exe,
                    workingDirectory: AbsolutePath.Create(env.Context.PathTable, TemporaryDirectory),
                    arguments: pipDataBuilder.ToPipData(" ", PipDataFragmentEscaping.NoEscaping),
                    responseFile: FileArtifact.Invalid,
                    responseFileData: PipData.Invalid,
                    environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
                    standardInput: FileArtifact.Invalid,
                    standardOutput: standardOut,
                    standardError: FileArtifact.Invalid,
                    standardDirectory: FileArtifact.CreateSourceFile(stdOutPath.GetParent(env.Context.PathTable)).CreateNextWrittenVersion(),
                    warningTimeout: null,
                    timeout: null,
                    dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe),
                    outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(standardOut.WithAttributes()),
                    directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
                    directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
                    orderDependencies: ReadOnlyArray<PipId>.Empty,
                    // Shell machinery itself is untracked so only the probe matters.
                    untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(env.Context.PathTable)),
                    untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(env.Context.PathTable)),
                    tags: ReadOnlyArray<StringId>.Empty,
                    successExitCodes: ReadOnlyArray<int>.Empty,
                    semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
                    options: Process.Options.ProducesPathIndependentOutputs,
                    provenance: PipProvenance.CreateDummy(env.Context),
                    toolDescription: StringId.Invalid,
                    additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
                    // Declaring tempPath as the pip's temp directory triggers the pre-run cleanup under test.
                    tempDirectory: tempPath));

            await VerifyPipResult(PipResultStatus.Succeeded, env, p);
            XAssert.AreEqual(1, env.OutputFilesProduced, "produced count");

            // Verify that the file did not exist by the time the pip ran
            string actual = File.ReadAllText(stdOut).Trim();
            XAssert.AreEqual("not exist", actual);
        });
}
/// <summary>
/// Verifies that a pip exiting with a declared retry exit code (3) is retried exactly
/// RetryCount times, ultimately fails, and surfaces its stdout/stderr in the log.
/// </summary>
[Fact]
public async Task ProcessRetriesDueToExitCode()
{
    const int RetryCount = 3;

    await WithExecutionEnvironment(
        config: pathTable => GetConfiguration(pathTable, retryCount: RetryCount),
        act: async env =>
        {
            var pathTable = env.Context.PathTable;
            var stringTable = env.Context.StringTable;

            // A script that always exits with code 3, the designated retry exit code.
            var batchScriptPath = AbsolutePath.Create(pathTable, OperatingSystemHelper.IsUnixOS ? GetFullPath("test.sh"): GetFullPath("test.bat"));
            File.WriteAllText(
                batchScriptPath.ToString(pathTable),
                OperatingSystemHelper.IsUnixOS ? "echo test;exit 3;printf '\n'" : @"
ECHO test
EXIT /b 3
");
            if (OperatingSystemHelper.IsUnixOS)
            {
                // 0x1ff == 0777: make the shell script executable.
                chmod(batchScriptPath.ToString(pathTable), 0x1ff);
            }

            // Redirect standard out to a file
            var stdOutFile = FileArtifact.CreateOutputFile(AbsolutePath.Create(pathTable, GetFullPath("stdout.txt")));
            var stdErrFile = FileArtifact.CreateOutputFile(AbsolutePath.Create(pathTable, GetFullPath("stderr.txt")));

            // Build arguments.
            var pipDataBuilder = new PipDataBuilder(stringTable);
            if (OperatingSystemHelper.IsUnixOS)
            {
                pipDataBuilder.Add("-c");
            }
            else
            {
                pipDataBuilder.Add("/d");
                pipDataBuilder.Add("/c");
            }

            using (pipDataBuilder.StartFragment(PipDataFragmentEscaping.CRuntimeArgumentRules, " "))
            {
                pipDataBuilder.Add(batchScriptPath);
            }

            FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
            Process p =
                AssignFakePipId(
                    new Process(
                        executable: exe,
                        workingDirectory: AbsolutePath.Create(pathTable, TemporaryDirectory),
                        arguments: pipDataBuilder.ToPipData(" ", PipDataFragmentEscaping.NoEscaping),
                        responseFile: FileArtifact.Invalid,
                        responseFileData: PipData.Invalid,
                        environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
                        standardInput: FileArtifact.Invalid,
                        standardOutput: stdOutFile,
                        standardError: stdErrFile,
                        standardDirectory: stdOutFile.Path.GetParent(pathTable),
                        warningTimeout: null,
                        timeout: null,
                        dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe, FileArtifact.CreateSourceFile(batchScriptPath)),
                        outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(stdOutFile.WithAttributes()),
                        directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
                        directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
                        orderDependencies: ReadOnlyArray<PipId>.Empty,
                        untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
                        untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
                        tags: ReadOnlyArray<StringId>.Empty,
                        successExitCodes: ReadOnlyArray<int>.Empty,
                        // Exit code 3 marks the run as retryable rather than immediately failed.
                        retryExitCodes: ReadOnlyArray<int>.FromWithoutCopy(3),
                        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
                        options: Process.Options.ProducesPathIndependentOutputs,
                        provenance: PipProvenance.CreateDummy(env.Context),
                        toolDescription: StringId.Invalid,
                        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
                        processRetries: env.Configuration.Schedule.ProcessRetries));

            // The script never succeeds, so after RetryCount retries the pip fails.
            await VerifyPipResult(PipResultStatus.Failed, env, p);

            AssertVerboseEventLogged(LogEventId.PipWillBeRetriedDueToExitCode, count: RetryCount);
            AssertLogContains(false, "Standard error:");
            AssertLogContains(false, "Standard output:");
        });

    SetExpectedFailures(1, 0);
}
/// <summary>
/// With PreserveOutputsMode.Disabled at the sandbox level, a pre-existing output must be
/// replaced by fresh execution even though the pip asks for previous outputs.
/// </summary>
[Fact]
public Task PreserveOutputsWithDisallowedSandbox()
{
    const string PreviouslyWrittenContent = "PriorOutputContent";
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            string observed = await RunPreviousOutputsTest(env, PreviouslyWrittenContent, allowPreviousOutputs: true);
            XAssert.AreNotEqual(observed, PreviouslyWrittenContent);
        },
        null,
        pathTable => GetConfiguration(pathTable, enableLazyOutputs: false,
            preserveOutputs: PreserveOutputsMode.Disabled));
}
/// <summary>
/// With PreserveOutputsMode.Enabled at the sandbox level but the pip opting out
/// (allowPreviousOutputs: false), a pre-existing output must still be replaced.
/// </summary>
[Fact]
public Task PreserveOutputsWithAllowedSandboxButDisallowedPip()
{
    const string PreviouslyWrittenContent = "PriorOutputContent";
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            string observed = await RunPreviousOutputsTest(env, PreviouslyWrittenContent, allowPreviousOutputs: false);
            XAssert.AreNotEqual(observed, PreviouslyWrittenContent);
        },
        null,
        pathTable => GetConfiguration(pathTable, enableLazyOutputs: false,
            preserveOutputs: PreserveOutputsMode.Enabled));
}
/// <summary>
/// With PreserveOutputsMode.Enabled and the pip opting in, the pre-existing output must
/// survive the run untouched.
/// </summary>
[Fact]
public Task PreserveOutputsAllowed()
{
    const string PreviouslyWrittenContent = "PriorOutputContent";
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            string observed = await RunPreviousOutputsTest(env, PreviouslyWrittenContent, allowPreviousOutputs: true);
            XAssert.AreEqual(observed, PreviouslyWrittenContent);
        },
        null,
        pathTable => GetConfiguration(pathTable, enableLazyOutputs: false, preserveOutputs: PreserveOutputsMode.Enabled));
}
/// <summary>
/// Seeds out1 with <paramref name="content"/>, runs a pip that reports whether that prior
/// output was still visible, and returns what the pip wrote to out2 — i.e. what it observed
/// in out1 at execution time.
/// </summary>
private async Task<string> RunPreviousOutputsTest(DummyPipExecutionEnvironment env, string content, bool allowPreviousOutputs)
{
    string workDir = GetFullPath("work");
    AbsolutePath workDirPath = AbsolutePath.Create(env.Context.PathTable, workDir);

    // Pre-populate the first output to simulate a leftover from a previous run.
    string firstOutput = GetFullPath("out1");
    AbsolutePath firstOutputPath = AbsolutePath.Create(env.Context.PathTable, firstOutput);
    File.WriteAllText(firstOutput, content);

    string secondOutput = GetFullPath("out2");
    AbsolutePath secondOutputPath = AbsolutePath.Create(env.Context.PathTable, secondOutput);

    Process pip = CreatePreviousOutputCheckerProcess(env.Context, workDirPath, firstOutputPath, secondOutputPath, allowPreviousOutputs);

    await VerifyPipResult(PipResultStatus.Succeeded, env, pip);

    return File.ReadAllText(secondOutput);
}
/// <summary>
/// This test uses inline data to be independent of the order in which the cache enumerate the publish entries.
/// </summary>
/// <remarks>
/// In this test we publish Hello-0, Hello-1 entries to the cache. When we enumerate the publish entry refs from the cache,
/// We don't want to depend on the order in which Hello-0 and Hello-1 come. If Hello-0 comes later, then reverting
/// to Hello-0 will fail. But if Hello-1 comes later, then reverting to Hello-1 will fail. Thus, we use inline data
/// to test both reverting to Hello-0 and Hello-1.
/// </remarks>
[Theory]
[InlineData("Hello-0")]
[InlineData("Hello-1")]
public Task TestCacheHitPipResultShouldContainDynamicallyObservedInputs(string revert)
{
    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            // Static file dependency of the pip.
            string inputPath = GetFullPath("input");
            FileArtifact dependency = FileArtifact.CreateSourceFile(AbsolutePath.Create(env.Context.PathTable, inputPath));
            File.WriteAllText(inputPath, "Hello");

            string outputPath = GetFullPath("out");
            FileArtifact output = FileArtifact.CreateOutputFile(AbsolutePath.Create(env.Context.PathTable, outputPath));

            // Directory dependency containing the dynamically observed member 'dir-input'.
            string directoryPath = GetFullPath("dir");
            DirectoryArtifact directoryDependency = DirectoryArtifact.CreateWithZeroPartialSealId(AbsolutePath.Create(env.Context.PathTable, directoryPath));
            Directory.CreateDirectory(directoryPath);

            AbsolutePath directoryMemberPath = directoryDependency.Path.Combine(env.Context.PathTable, "dir-input");
            string expandedDirectoryMemberPath = directoryMemberPath.ToString(env.Context.PathTable);
            File.WriteAllText(expandedDirectoryMemberPath, "Hello-0");

            env.SetSealedDirectoryContents(directoryDependency, FileArtifact.CreateSourceFile(directoryMemberPath));

            AbsolutePath standardDirectory = output.Path.GetParent(env.Context.PathTable);
            AbsolutePath workingDirectory = directoryDependency.Path;

            // The pip copies the directory member's content to the output, making the member
            // a dynamically observed input.
            string script = OperatingSystemHelper.IsUnixOS ? I($"/bin/cat {expandedDirectoryMemberPath} > {outputPath}") : I($"type {expandedDirectoryMemberPath} > {outputPath}");

            Process process = CreateCmdProcess(
                env.Context,
                script,
                workingDirectory,
                standardDirectory,
                outputs: new[] { output.WithAttributes() },
                directoryDependencies: new[] { directoryDependency });

            // Run pip with "Hello-0" and then with "Hello-1" for the observed input.
            // Thus, we will have two published entries in the cache with the same weak FP and path set.
            await VerifyPipResult(
                PipResultStatus.Succeeded,
                env,
                process,
                customVerify:
                    pipResult =>
                    {
                        XAssert.IsTrue(pipResult.DynamicallyObservedFiles.Contains(directoryMemberPath));
                    });

            File.WriteAllText(expandedDirectoryMemberPath, "Hello-1");

            await VerifyPipResult(
                PipResultStatus.Succeeded,
                env,
                process,
                customVerify:
                    pipResult =>
                    {
                        XAssert.IsTrue(pipResult.DynamicallyObservedFiles.Contains(directoryMemberPath));
                    });

            // Modify static input and the run pip with random GUID value for observed input.
            File.WriteAllText(inputPath, "World");
            File.WriteAllText(expandedDirectoryMemberPath, Guid.NewGuid().ToString());

            await VerifyPipResult(PipResultStatus.Succeeded, env, process);

            // Revert static input to its original value.
            // Revert observed input to either "Hello-0" or "Hello-1" depending on the test.
            File.WriteAllText(inputPath, "Hello");
            File.WriteAllText(expandedDirectoryMemberPath, revert);

            // A cache hit must still report the dynamically observed file in the PipResult.
            await VerifyPipResult(
                PipResultStatus.DeployedFromCache,
                env,
                process,
                customVerify:
                    pipResult =>
                    {
                        // BUG1158938 causes it to fail.
                        XAssert.IsTrue(pipResult.DynamicallyObservedFiles.Contains(directoryMemberPath));
                    });
        });
}
/// <summary>
/// Executes <paramref name="pip"/> via <see cref="TestPipExecutor"/> and verifies the outcome:
/// result status, perpetual-dirty flag, execution level, and (for process pips) the reported
/// weak fingerprint and, on cache hits, that no process was actually launched.
/// </summary>
/// <param name="expected">Expected result status; must be a status that indicates actual execution.</param>
/// <param name="env">Execution environment to run the pip in; its file content manager and file system view are reset first.</param>
/// <param name="pip">Pip to execute.</param>
/// <param name="verifyFingerprint">If true and <paramref name="pip"/> is a <see cref="Process"/>, recomputes the weak fingerprint and compares it to the reported performance info.</param>
/// <param name="checkNoProcessExecutionOnCacheHit">If true, cached/up-to-date process results must report zero process execution time.</param>
/// <param name="expectMarkedPerpetuallyDirty">Expected value of <see cref="PipResult.MustBeConsideredPerpetuallyDirty"/>.</param>
/// <param name="expectedExecutionLevel">Optional override for the expected execution level (defaults to the level implied by <paramref name="expected"/>).</param>
/// <param name="customVerify">Optional callback for additional assertions against the <see cref="PipResult"/>.</param>
/// <returns>The performance info reported for the executed pip.</returns>
private static async Task<PipExecutionPerformance> VerifyPipResult(
    PipResultStatus expected,
    DummyPipExecutionEnvironment env,
    Pip pip,
    bool verifyFingerprint = true,
    bool checkNoProcessExecutionOnCacheHit = true,
    bool expectMarkedPerpetuallyDirty = false,
    PipExecutionLevel? expectedExecutionLevel = null,
    Action<PipResult> customVerify = null)
{
    // TODO: Add code below to validate the two way fingerprint.
    XAssert.IsTrue(expected.IndicatesExecution(), "Expected result shouldn't be returned by PipExecutor");

    ContentFingerprint calculatedFingerprint = ContentFingerprint.Zero;
    bool checkFingerprint = pip is Process && verifyFingerprint;

    // Reset the file content manager to ensure pip runs with clean materialization state
    env.ResetFileContentManager();

    // Reset the file system view to make sure that we do not use a cached information for existence checks
    env.ResetFileSystemView();

    var operationTracker = new OperationTracker(env.LoggingContext);
    PipResult result;

    using (var operationContext = operationTracker.StartOperation(PipExecutorCounter.PipRunningStateDuration, pip.PipId, pip.PipType, env.LoggingContext))
    {
        result = await TestPipExecutor.ExecuteAsync(operationContext, env, pip);
    }

    XAssert.AreEqual(expected, result.Status, "PipResult's status didn't match");
    XAssert.AreEqual(
        expectMarkedPerpetuallyDirty,
        result.MustBeConsideredPerpetuallyDirty,
        "PipResult's MustBeConsideredPerpetuallyDirty didn't match");

    if (checkFingerprint)
    {
        // Need to call this after running pip executor to ensure inputs are hashed
        calculatedFingerprint = env.ContentFingerprinter.ComputeWeakFingerprint((Process)pip);
        ProcessPipExecutionPerformance processPerf = (ProcessPipExecutionPerformance)result.PerformanceInfo;
        XAssert.AreEqual(calculatedFingerprint.Hash, processPerf.Fingerprint, "Calculated fingerprint and returned value not equal.");
    }

    if (pip is Process)
    {
        XAssert.IsNotNull(result.PerformanceInfo, "Performance info should have been reported");
        XAssert.AreEqual(expectedExecutionLevel ?? expected.ToExecutionLevel(), result.PerformanceInfo.ExecutionLevel);
        Assert.IsType(typeof(ProcessPipExecutionPerformance), result.PerformanceInfo);

        if (checkNoProcessExecutionOnCacheHit)
        {
            // A cache hit (Cached/UpToDate) must not have actually launched the process.
            if (result.PerformanceInfo.ExecutionLevel == PipExecutionLevel.Cached ||
                result.PerformanceInfo.ExecutionLevel == PipExecutionLevel.UpToDate)
            {
                XAssert.AreEqual(TimeSpan.Zero, ((ProcessPipExecutionPerformance)result.PerformanceInfo).ProcessExecutionTime);
            }
        }
    }

    customVerify?.Invoke(result);

    return result.PerformanceInfo;
}
/// <summary>
/// Produces the JSON configuration blob for an in-memory cache instance with the given cache id.
/// </summary>
private static string CreateInMemoryJsonConfigString(string cacheId)
{
    // '{{' and '}}' are string.Format escapes for literal braces; {0} receives the cache id.
    const string InMemoryJsonConfigTemplate = @"{{
""Assembly"":""BuildXL.Cache.InMemory"",
""Type"": ""BuildXL.Cache.InMemory.MemCacheFactory"",
""CacheId"":""{0}"",
""StrictMetadataCasCoupling"":false
}}";

    return string.Format(InMemoryJsonConfigTemplate, cacheId);
}
/// <summary>
/// Runs <paramref name="act"/> against a freshly created cache-convergence execution environment.
/// </summary>
/// <param name="cacheDir">Unused — it is not forwarded; the environment is always created with an
/// in-memory cache. Kept only for call-site symmetry with other helpers.</param>
/// <param name="act">Test body invoked with the created environment.</param>
/// <param name="createMountExpander">Optional factory for the semantic path expander.</param>
/// <param name="config">Optional factory for the configuration.</param>
/// <param name="allowlistCreator">Optional factory for the file-access allowlist.</param>
private Task WithCachingExecutionEnvironmentForCacheConvergence(
    string cacheDir,
    Func<DummyPipExecutionEnvironment, Task> act,
    Func<PathTable, SemanticPathExpander> createMountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    Func<PathTable, StringTable, FileAccessAllowlist> allowlistCreator = null)
{
    return WithExecutionEnvironmentForCacheConvergence(act, createMountExpander, config: config, allowlistCreator: allowlistCreator);
}
/// <summary>
/// Runs <paramref name="act"/> against an execution environment backed by an in-memory cache.
/// </summary>
/// <param name="cacheDir">Unused — it is not forwarded; the cache is always created via
/// <see cref="InMemoryCacheFactory.Create"/>. Kept only for call-site symmetry.</param>
/// <param name="act">Test body invoked with the created environment.</param>
/// <param name="createMountExpander">Optional factory for the semantic path expander.</param>
/// <param name="config">Optional factory for the configuration.</param>
/// <param name="allowlistCreator">Optional factory for the file-access allowlist.</param>
/// <param name="useInMemoryCache">Unused — an in-memory cache is used regardless of this value.</param>
private Task WithCachingExecutionEnvironment(
    string cacheDir,
    Func<DummyPipExecutionEnvironment, Task> act,
    Func<PathTable, SemanticPathExpander> createMountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    Func<PipExecutionContext, FileAccessAllowlist> allowlistCreator = null,
    bool useInMemoryCache = true)
{
    return WithExecutionEnvironment(act, InMemoryCacheFactory.Create, createMountExpander, config: config, allowlistCreator: allowlistCreator);
}
/// <summary>
/// Creates a fresh cache-convergence execution environment and invokes <paramref name="act"/> with it.
/// </summary>
/// <param name="act">Test body invoked with the created environment.</param>
/// <param name="createMountExpander">Optional factory for the semantic path expander; defaults to <see cref="SemanticPathExpander.Default"/>.</param>
/// <param name="config">Optional factory for the configuration.</param>
/// <param name="allowlistCreator">Optional factory for the file-access allowlist.</param>
private Task WithExecutionEnvironmentForCacheConvergence(
    Func<DummyPipExecutionEnvironment, Task> act,
    Func<PathTable, SemanticPathExpander> createMountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    Func<PathTable, StringTable, FileAccessAllowlist> allowlistCreator = null)
{
    var context = BuildXLContext.CreateInstanceForTesting();

    FileAccessAllowlist allowlist = allowlistCreator?.Invoke(context.PathTable, context.StringTable);

    // Fall back to the default expander when no factory was supplied.
    SemanticPathExpander expander = createMountExpander != null
        ? createMountExpander(context.PathTable)
        : SemanticPathExpander.Default;

    DummyPipExecutionEnvironment environment = CreateExecutionEnvironmentForCacheConvergence(
        context,
        mountExpander: expander,
        config: config,
        fileAccessAllowlist: allowlist);

    return act(environment);
}
/// <summary>
/// Creates an execution environment plus an IPC server from <paramref name="ipcProvider"/>, then
/// invokes <paramref name="act"/> with the environment, the server's moniker, and the server itself.
/// The server's lifetime is scoped by <see cref="WithIpcServer"/>.
/// </summary>
/// <param name="ipcProvider">Provider used both to host the server and passed into the environment.</param>
/// <param name="ipcExecutor">Executor handling IPC operations on the server side.</param>
/// <param name="act">Test body invoked with (environment, moniker, server).</param>
/// <param name="cache">Optional cache factory for the environment.</param>
/// <param name="createMountExpander">Optional factory for the semantic path expander.</param>
/// <param name="config">Optional factory for the configuration.</param>
/// <param name="allowlistCreator">Optional factory for the file-access allowlist.</param>
/// <param name="useInMemoryCache">Unused — the cache is controlled solely by <paramref name="cache"/>.</param>
private Task WithExecutionEnvironmentAndIpcServer(
    IIpcProvider ipcProvider,
    IIpcOperationExecutor ipcExecutor,
    Func<DummyPipExecutionEnvironment, IIpcMoniker, IServer, Task> act,
    Func<EngineCache> cache = null,
    Func<PathTable, SemanticPathExpander> createMountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    Func<PipExecutionContext, FileAccessAllowlist> allowlistCreator = null,
    bool useInMemoryCache = true)
{
    return WithExecutionEnvironment(
        (env) =>
        {
            return WithIpcServer(
                ipcProvider,
                ipcExecutor,
                new ServerConfig(),
                (moniker, server) => act(env, moniker, server));
        },
        cache,
        createMountExpander,
        config: config,
        allowlistCreator: allowlistCreator,
        ipcProvider: ipcProvider);
}
/// <summary>
/// Creates a fresh execution environment (optionally with a custom cache, mounts, configuration,
/// allowlist, and IPC provider) and invokes <paramref name="act"/> with it.
/// </summary>
/// <param name="act">Test body invoked with the created environment.</param>
/// <param name="cache">Optional cache factory; when null, an empty-fingerprint in-memory cache is used.</param>
/// <param name="createMountExpander">Optional factory for the semantic path expander; defaults to <see cref="SemanticPathExpander.Default"/>.</param>
/// <param name="config">Optional factory for the configuration.</param>
/// <param name="allowlistCreator">Optional factory for the file-access allowlist.</param>
/// <param name="ipcProvider">Optional IPC provider for the environment.</param>
private Task WithExecutionEnvironment(
    Func<DummyPipExecutionEnvironment, Task> act,
    Func<EngineCache> cache = null,
    Func<PathTable, SemanticPathExpander> createMountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    Func<PipExecutionContext, FileAccessAllowlist> allowlistCreator = null,
    IIpcProvider ipcProvider = null)
{
    var context = BuildXLContext.CreateInstanceForTesting();

    FileAccessAllowlist allowlist = allowlistCreator?.Invoke(context);

    // Fall back to the default expander when no factory was supplied.
    SemanticPathExpander mountExpander = createMountExpander != null
        ? createMountExpander(context.PathTable)
        : SemanticPathExpander.Default;

    DummyPipExecutionEnvironment environment = CreateExecutionEnvironment(
        context,
        cache,
        mountExpander,
        config: config,
        fileAccessAllowlist: allowlist,
        ipcProvider: ipcProvider);

    return act(environment);
}
/// <summary>
/// Builds a default test configuration rooted at <see cref="TestPath"/>, overriding the sandbox and
/// scheduler knobs that individual tests exercise.
/// </summary>
private static IConfiguration GetConfiguration(
    PathTable pathTable,
    bool fileAccessIgnoreCodeCoverage = true,
    bool enableLazyOutputs = true,
    bool failUnexpectedFileAccesses = true,
    bool unexpectedFileAccessesAreErrors = true,
    bool allowCopySymlink = true,
    PreserveOutputsMode preserveOutputs = PreserveOutputsMode.Disabled,
    int? retryCount = null,
    bool monitorFileAccesses = true,
    OutputReportingMode outputReportingMode = OutputReportingMode.TruncatedOutputOnError,
    bool storeOutputsToCache = true)
{
    var configuration = ConfigurationHelpers.GetDefaultForTesting(pathTable, AbsolutePath.Create(pathTable, TestPath));

    // Sandbox knobs.
    configuration.Sandbox.FileAccessIgnoreCodeCoverage = fileAccessIgnoreCodeCoverage;
    configuration.Sandbox.FailUnexpectedFileAccesses = failUnexpectedFileAccesses;
    configuration.Sandbox.OutputReportingMode = outputReportingMode;
    configuration.Sandbox.UnsafeSandboxConfigurationMutable.UnexpectedFileAccessesAreErrors = unexpectedFileAccessesAreErrors;
    configuration.Sandbox.UnsafeSandboxConfigurationMutable.PreserveOutputs = preserveOutputs;
    configuration.Sandbox.UnsafeSandboxConfigurationMutable.MonitorFileAccesses = monitorFileAccesses;

    // Scheduler knobs.
    configuration.Schedule.EnableLazyOutputMaterialization = enableLazyOutputs;
    configuration.Schedule.AllowCopySymlink = allowCopySymlink;
    configuration.Schedule.StoreOutputsToCache = storeOutputsToCache;

    if (retryCount is int retries)
    {
        configuration.Schedule.ProcessRetries = retries;
    }

    return configuration;
}
/// <summary>
/// Builds a default test configuration for cache-convergence tests, rooted at <see cref="TestPath"/>.
/// </summary>
private static IConfiguration GetConfigurationForCacheConvergence(
    PathTable pathTable,
    bool fileAccessIgnoreCodeCoverage = true,
    bool enableLazyOutputs = true,
    bool failUnexpectedFileAccesses = true,
    bool unexpectedFileAccessesAreErrors = true,
    bool enableDeterminismProbe = false)
{
    var configuration = ConfigurationHelpers.GetDefaultForTesting(pathTable, AbsolutePath.Create(pathTable, TestPath));

    // Sandbox knobs.
    configuration.Sandbox.FileAccessIgnoreCodeCoverage = fileAccessIgnoreCodeCoverage;
    configuration.Sandbox.FailUnexpectedFileAccesses = failUnexpectedFileAccesses;
    configuration.Sandbox.UnsafeSandboxConfigurationMutable.UnexpectedFileAccessesAreErrors = unexpectedFileAccessesAreErrors;

    // Scheduler and cache knobs.
    configuration.Schedule.EnableLazyOutputMaterialization = enableLazyOutputs;
    configuration.Cache.DeterminismProbe = enableDeterminismProbe;

    return configuration;
}
/// <summary>
/// Creates a <see cref="DummyPipExecutionEnvironment"/> backed by an in-memory content cache and a
/// <see cref="TestPipExecutorTwoPhaseFingerprintStore"/> (which can produce hits), suitable for
/// cache-convergence tests.
/// </summary>
/// <param name="context">BuildXL context for the environment.</param>
/// <param name="mountExpander">Optional semantic path expander.</param>
/// <param name="config">Optional configuration factory; defaults to <see cref="GetConfigurationForCacheConvergence"/>.</param>
/// <param name="fileAccessAllowlist">Optional file-access allowlist.</param>
private DummyPipExecutionEnvironment CreateExecutionEnvironmentForCacheConvergence(
    BuildXLContext context,
    SemanticPathExpander mountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    FileAccessAllowlist fileAccessAllowlist = null)
{
    // TestPipExecutorArtifactContentCache0
    IConfiguration configInstance = config == null ? GetConfigurationForCacheConvergence(context.PathTable) : config(context.PathTable);

    EngineCache cacheLayer = new EngineCache(
        new InMemoryArtifactContentCache(),
        new TestPipExecutorTwoPhaseFingerprintStore());

    var env = new DummyPipExecutionEnvironment(
        CreateLoggingContextForTest(),
        context,
        configInstance,
        pipCache: cacheLayer,
        semanticPathExpander: mountExpander,
        fileAccessAllowlist: fileAccessAllowlist,
        allowUnspecifiedSealedDirectories: false,
        subst: TryGetSubstSourceAndTarget(out var substSource, out var substTarget)
            ? (substSource, substTarget)
            : default((string, string)?),
        sandboxConnection: GetSandboxConnection());

    // NOTE(review): presumably enabled so fingerprint text is available for diagnosing mismatches in tests.
    env.ContentFingerprinter.FingerprintTextEnabled = true;
    return env;
}
/// <summary>
/// Creates a <see cref="DummyPipExecutionEnvironment"/>. When no <paramref name="cache"/> factory is
/// given, the environment uses an in-memory content cache with an <see cref="EmptyTwoPhaseFingerprintStore"/>,
/// i.e. fingerprint lookups never hit.
/// </summary>
/// <param name="context">BuildXL context for the environment.</param>
/// <param name="cache">Optional cache factory overriding the default empty-fingerprint cache.</param>
/// <param name="mountExpander">Optional semantic path expander.</param>
/// <param name="config">Optional configuration factory; defaults to <see cref="GetConfiguration"/>.</param>
/// <param name="fileAccessAllowlist">Optional file-access allowlist.</param>
/// <param name="ipcProvider">Optional IPC provider for the environment.</param>
private DummyPipExecutionEnvironment CreateExecutionEnvironment(
    BuildXLContext context,
    Func<EngineCache> cache = null,
    SemanticPathExpander mountExpander = null,
    Func<PathTable, IConfiguration> config = null,
    FileAccessAllowlist fileAccessAllowlist = null,
    IIpcProvider ipcProvider = null)
{
    IConfiguration configInstance = config == null ? GetConfiguration(context.PathTable) : config(context.PathTable);

    EngineCache cacheLayer = cache != null
        ? cache()
        : new EngineCache(
            new InMemoryArtifactContentCache(),

            // Note that we have an 'empty' store (no hits ever) rather than a normal in memory one.
            new EmptyTwoPhaseFingerprintStore());

    var env = new DummyPipExecutionEnvironment(
        CreateLoggingContextForTest(),
        context,
        configInstance,
        pipCache: cacheLayer,
        semanticPathExpander: mountExpander,
        fileAccessAllowlist: fileAccessAllowlist,
        allowUnspecifiedSealedDirectories: false,
        ipcProvider: ipcProvider,
        subst: TryGetSubstSourceAndTarget(out var substSource, out var substTarget)
            ? (substSource, substTarget)
            : default((string, string)?),
        sandboxConnection: GetSandboxConnection());

    // NOTE(review): presumably enabled so fingerprint text is available for diagnosing mismatches in tests.
    env.ContentFingerprinter.FingerprintTextEnabled = true;
    return env;
}
/// <summary>
/// Creates a shell process pip that copies <paramref name="source"/> to <paramref name="destination"/>
/// ('/bin/cp' on Unix; 'type src &gt; dest' via cmd on Windows).
/// </summary>
/// <param name="context">Pip execution context.</param>
/// <param name="source">File to copy; declared as a dependency unless <paramref name="omitDependencies"/> is set.</param>
/// <param name="destination">Required output file; its parent is used as the standard directory.</param>
/// <param name="omitDependencies">When true, only the shell executable is declared as a dependency — the source file is deliberately left undeclared.</param>
/// <param name="options">Process options for the pip.</param>
/// <param name="temporaryOutput">Optional extra output declared with <see cref="FileExistence.Temporary"/>.</param>
/// <param name="serviceInfo">Optional service info for the pip.</param>
/// <param name="destinationFileName">When non-empty, used verbatim as the copy target on the command line instead of <paramref name="destination"/> (e.g. to exercise relative-path targets).</param>
private static Process CreateCopyProcess(
    PipExecutionContext context,
    AbsolutePath source,
    AbsolutePath destination,
    bool omitDependencies = false,
    Process.Options options = default,
    AbsolutePath? temporaryOutput = null,
    ServiceInfo serviceInfo = default,
    string destinationFileName = null)
{
    var pathTable = context.PathTable;
    var pipDataBuilder = new PipDataBuilder(context.StringTable);

    // Shell preamble: open a '-c "..."' fragment on Unix; '/d /c' switches for cmd on Windows.
    if (OperatingSystemHelper.IsUnixOS)
    {
        pipDataBuilder.Add("-c \"");
    }
    else
    {
        pipDataBuilder.Add("/d");
        pipDataBuilder.Add("/c");
    }

    using (pipDataBuilder.StartFragment(PipDataFragmentEscaping.CRuntimeArgumentRules, " "))
    {
        if (OperatingSystemHelper.IsUnixOS)
        {
            pipDataBuilder.Add("/bin/cp");
            pipDataBuilder.Add(source);
        }
        else
        {
            // 'type src > dest' performs the copy on Windows.
            pipDataBuilder.Add("type");
            pipDataBuilder.Add(source);
            pipDataBuilder.Add(">");
        }

        if (!string.IsNullOrEmpty(destinationFileName))
        {
            pipDataBuilder.Add(destinationFileName);
        }
        else
        {
            pipDataBuilder.Add(destination);
        }
    }

    // Close the quote opened by the Unix '-c "' preamble.
    if (OperatingSystemHelper.IsUnixOS)
    {
        using (pipDataBuilder.StartFragment(PipDataFragmentEscaping.NoEscaping, " "))
        {
            pipDataBuilder.Add("\"");
        }
    }

    FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));

    var outputs = new List<FileArtifactWithAttributes>()
    {
        FileArtifactWithAttributes.Create(FileArtifact.CreateSourceFile(destination), FileExistence.Required).CreateNextWrittenVersion()
    };

    if (temporaryOutput != null)
    {
        outputs.Add(FileArtifactWithAttributes.Create(FileArtifact.CreateSourceFile(temporaryOutput.Value), FileExistence.Temporary).CreateNextWrittenVersion());
    }

    return AssignFakePipId(new Process(
        executable: exe,
        workingDirectory: source.GetParent(pathTable),
        arguments: pipDataBuilder.ToPipData(" ", PipDataFragmentEscaping.NoEscaping),
        responseFile: FileArtifact.Invalid,
        responseFileData: PipData.Invalid,
        environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
        standardInput: FileArtifact.Invalid,
        standardOutput: FileArtifact.Invalid,
        standardError: FileArtifact.Invalid,
        standardDirectory: FileArtifact.CreateSourceFile(destination.GetParent(pathTable)).CreateNextWrittenVersion(),
        warningTimeout: null,
        timeout: null,
        dependencies: omitDependencies ? ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe) : ReadOnlyArray<FileArtifact>.FromWithoutCopy(FileArtifact.CreateSourceFile(source), exe),
        outputs: ReadOnlyArray<FileArtifactWithAttributes>.From(outputs),
        directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
        directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
        orderDependencies: ReadOnlyArray<PipId>.Empty,
        untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
        untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
        tags: ReadOnlyArray<StringId>.Empty,
        successExitCodes: ReadOnlyArray<int>.Empty,
        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
        options: options,
        serviceInfo: serviceInfo,
        provenance: PipProvenance.CreateDummy(context),
        toolDescription: StringId.Invalid,
        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty));
}
/// <summary>
/// Seals <paramref name="root"/> as a directory containing the files 'a' and 'b' that
/// <see cref="CreateDirectoryWithProbeTargets"/> populates (or with no contents when
/// <paramref name="omitContents"/> is set). Call at most once per environment/root pair, since the
/// same directory artifact and member paths are reserved each time.
/// </summary>
private static DirectoryArtifact SealDirectoryWithProbeTargets(DummyPipExecutionEnvironment env, AbsolutePath root, bool omitContents = false)
{
    var sealedDirectory = DirectoryArtifact.CreateWithZeroPartialSealId(root);

    if (omitContents)
    {
        env.SetSealedDirectoryContents(sealedDirectory);
        return sealedDirectory;
    }

    var pathTable = env.Context.PathTable;
    var stringTable = env.Context.StringTable;
    AbsolutePath memberA = root.Combine(pathTable, PathAtom.Create(stringTable, "a"));
    AbsolutePath memberB = root.Combine(pathTable, PathAtom.Create(stringTable, "b"));
    env.SetSealedDirectoryContents(sealedDirectory, FileArtifact.CreateSourceFile(memberA), FileArtifact.CreateSourceFile(memberB));
    return sealedDirectory;
}
/// <summary>
/// Seals <paramref name="root"/> as a source directory (top-level-only or all-directories) whose
/// dynamically accessed members are the given paths. Call at most once per environment/root pair,
/// since the same directory artifact is reserved each time.
/// </summary>
private static DirectoryArtifact SealSourceDirectoryWithProbeTargets(DummyPipExecutionEnvironment env, AbsolutePath root, bool allDirectories, params AbsolutePath[] dynamicallyAccessedPathInDirectory)
{
    // Reserve the directory artifact without static contents, then register it as a source seal.
    DirectoryArtifact sourceSeal = SealDirectoryWithProbeTargets(env, root, omitContents: true);
    env.SetSealedSourceDirectory(sourceSeal, allDirectories, dynamicallyAccessedPathInDirectory);
    return sourceSeal;
}
/// <summary>
/// Creates the input directory for <see cref="CreateDirectoryProbingProcess"/>: files 'a' and 'b'
/// directly under <paramref name="directory"/>, and 'c' inside subfolder 'CFolder'. Passing null for
/// a file's contents guarantees that file (and for 'c', its folder) does not exist. May be called
/// repeatedly within a test.
/// </summary>
/// <returns>The contents the probing process is expected to write to its output file.</returns>
private static string CreateDirectoryWithProbeTargets(
    PathTable pathTable,
    AbsolutePath directory,
    string fileAContents = null,
    string fileBContents = null,
    string fileCContents = null)
{
    // Local helper: materialize the file when contents are provided, otherwise ensure absence.
    void WriteOrDelete(string path, string contents)
    {
        if (contents == null)
        {
            File.Delete(path);
        }
        else
        {
            File.WriteAllText(path, contents);
        }
    }

    string expandedDirectory = directory.ToString(pathTable);
    Directory.CreateDirectory(expandedDirectory);

    string fileA = Path.Combine(expandedDirectory, "a");
    string fileB = Path.Combine(expandedDirectory, "b");
    string folderC = Path.Combine(expandedDirectory, "CFolder");
    string fileC = Path.Combine(folderC, "c");

    WriteOrDelete(fileA, fileAContents);
    WriteOrDelete(fileB, fileBContents);

    if (fileCContents == null)
    {
        // 'c' lives in its own folder; remove the folder too so probes see it as absent.
        if (Directory.Exists(folderC))
        {
            File.Delete(fileC);
            Directory.Delete(folderC);
        }
    }
    else
    {
        Directory.CreateDirectory(folderC);
        File.WriteAllText(fileC, fileCContents);
    }

    // The probe prints 'a' when present (else 'b'), then 'c' if present; echo appends a newline.
    return (fileAContents ?? fileBContents ?? string.Empty) + fileCContents + Environment.NewLine;
}
/// <summary>
/// Seals the two subdirectories 'a' (containing 'a/a') and 'b' (containing 'b/b') under
/// <paramref name="root"/> that <see cref="CreateMultipleDirectoriesWithProbeTargets"/> populates.
/// Call at most once per environment/root pair, since the same directory artifacts and paths are
/// reserved each time.
/// </summary>
private static Tuple<DirectoryArtifact, DirectoryArtifact> SealMultipleDirectoriesWithProbeTargets(DummyPipExecutionEnvironment env, AbsolutePath root)
{
    var pathTable = env.Context.PathTable;
    var stringTable = env.Context.StringTable;

    // Subdirectory 'a' with member file 'a/a'.
    AbsolutePath subdirA = root.Combine(pathTable, PathAtom.Create(stringTable, "a"));
    AbsolutePath memberA = subdirA.Combine(pathTable, PathAtom.Create(stringTable, "a"));
    var sealedA = DirectoryArtifact.CreateWithZeroPartialSealId(subdirA);

    // Subdirectory 'b' with member file 'b/b'.
    AbsolutePath subdirB = root.Combine(pathTable, PathAtom.Create(stringTable, "b"));
    AbsolutePath memberB = subdirB.Combine(pathTable, PathAtom.Create(stringTable, "b"));
    var sealedB = DirectoryArtifact.CreateWithZeroPartialSealId(subdirB);

    env.SetSealedDirectoryContents(sealedA, FileArtifact.CreateSourceFile(memberA));
    env.SetSealedDirectoryContents(sealedB, FileArtifact.CreateSourceFile(memberB));

    return Tuple.Create(sealedA, sealedB);
}
/// <summary>
/// Creates the input directories for <see cref="CreateMultiDirectoryProbingProcess"/>: file 'a/a'
/// and file 'b/b' under <paramref name="directory"/>. Passing null for a file's contents guarantees
/// that file does not exist. May be called repeatedly within a test.
/// </summary>
/// <returns>The contents the probing process is expected to write to its output file.</returns>
private static string CreateMultipleDirectoriesWithProbeTargets(
    PathTable pathTable,
    AbsolutePath directory,
    string fileAContents = null,
    string fileBContents = null)
{
    // Local helper: materialize the file when contents are provided, otherwise ensure absence.
    void WriteOrDelete(string path, string contents)
    {
        if (contents == null)
        {
            File.Delete(path);
        }
        else
        {
            File.WriteAllText(path, contents);
        }
    }

    string expandedRoot = directory.ToString(pathTable);
    string fileA = Path.Combine(expandedRoot, OperatingSystemHelper.IsUnixOS ? "a/a" : @"a\a");
    string fileB = Path.Combine(expandedRoot, OperatingSystemHelper.IsUnixOS ? "b/b" : @"b\b");

    Directory.CreateDirectory(Path.GetDirectoryName(fileA));
    Directory.CreateDirectory(Path.GetDirectoryName(fileB));

    WriteOrDelete(fileA, fileAContents);
    WriteOrDelete(fileB, fileBContents);

    // The probe prints 'a/a' when present, else 'b/b'; echo appends a newline.
    return (fileAContents ?? fileBContents ?? string.Empty) + Environment.NewLine;
}
/// <summary>
/// The returned process will probe for 'a' in the given directory, and if not found will then probe for 'b'
/// (or runs the caller-supplied <paramref name="script"/> instead).
/// </summary>
/// <param name="context">Pip execution context.</param>
/// <param name="directory">Directory dependency the probe runs against; also used as the working directory.</param>
/// <param name="output">Path that receives the process's standard output.</param>
/// <param name="script">Optional shell script overriding the default a-then-b probe.</param>
/// <param name="directoryOutput">Optional directory output to declare on the pip.</param>
private static Process CreateDirectoryProbingProcess(PipExecutionContext context, DirectoryArtifact directory, AbsolutePath output, string script = null, DirectoryArtifact? directoryOutput = null)
{
    if (script == null)
    {
        script = OperatingSystemHelper.IsUnixOS ? "if [ -f a ]; then /bin/cat a; else /bin/cat b; fi;" : "if exist a (type a) else (type b)";
    }

    var pathTable = context.PathTable;
    FileArtifact stdout = FileArtifact.CreateSourceFile(output).CreateNextWrittenVersion();
    FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
    return AssignFakePipId(new Process(
        executable: exe,
        workingDirectory: directory.Path,
        arguments: PipDataBuilder.CreatePipData(
            pathTable.StringTable,
            " ",
            PipDataFragmentEscaping.NoEscaping,
            // On Windows, 'echo.' appends a newline to the captured output.
            OperatingSystemHelper.IsUnixOS ? "-c \" " + script + " \"" : "/d /c (" + script + ") & echo."
            ),
        responseFile: FileArtifact.Invalid,
        responseFileData: PipData.Invalid,
        environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
        standardInput: FileArtifact.Invalid,
        standardOutput: stdout,
        standardError: FileArtifact.Invalid,
        standardDirectory: output.GetParent(pathTable),
        warningTimeout: null,
        timeout: null,
        dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe),
        outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(stdout.WithAttributes()),
        directoryDependencies: ReadOnlyArray<DirectoryArtifact>.FromWithoutCopy(directory),
        directoryOutputs: directoryOutput.HasValue ? ReadOnlyArray<DirectoryArtifact>.FromWithoutCopy(new[] { directoryOutput.Value }) : ReadOnlyArray<DirectoryArtifact>.Empty,
        orderDependencies: ReadOnlyArray<PipId>.Empty,
        untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
        untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
        tags: ReadOnlyArray<StringId>.Empty,
        successExitCodes: ReadOnlyArray<int>.Empty,
        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
        provenance: PipProvenance.CreateDummy(context),
        toolDescription: StringId.Invalid,
        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty));
}
/// <summary>
/// The returned process will probe for 'a\a' in the given directory, and if not found will then probe for 'b\b'.
/// The dependency is represented as a directory artifact.
/// </summary>
/// <param name="context">Pip execution context.</param>
/// <param name="directoryRoot">Working directory; parent of the two sealed subdirectories.</param>
/// <param name="subdirectories">The sealed 'a' and 'b' subdirectories (see <see cref="SealMultipleDirectoriesWithProbeTargets"/>), both declared as directory dependencies.</param>
/// <param name="output">Path that receives the process's standard output.</param>
private static Process CreateMultiDirectoryProbingProcess(PipExecutionContext context, AbsolutePath directoryRoot, Tuple<DirectoryArtifact, DirectoryArtifact> subdirectories, AbsolutePath output)
{
    var pathTable = context.PathTable;
    FileArtifact stdout = FileArtifact.CreateSourceFile(output).CreateNextWrittenVersion();
    FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
    return AssignFakePipId(new Process(
        executable: exe,
        workingDirectory: directoryRoot,
        arguments: PipDataBuilder.CreatePipData(
            context.StringTable,
            " ",
            PipDataFragmentEscaping.NoEscaping,
            // On Windows, 'echo.' appends a newline to the captured output.
            OperatingSystemHelper.IsUnixOS ? "-c \"if [ -f a/a ]; then /bin/cat a/a; else /bin/cat b/b; fi;\"" : @"/d /c (if exist a\a (type a\a) else (type b\b)) & echo."
            ),
        responseFile: FileArtifact.Invalid,
        responseFileData: PipData.Invalid,
        environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
        standardInput: FileArtifact.Invalid,
        standardOutput: stdout,
        standardError: FileArtifact.Invalid,
        standardDirectory: output.GetParent(pathTable),
        warningTimeout: null,
        timeout: null,
        dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe),
        outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(stdout.WithAttributes()),
        directoryDependencies: ReadOnlyArray<DirectoryArtifact>.FromWithoutCopy(subdirectories.Item1, subdirectories.Item2),
        directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
        orderDependencies: ReadOnlyArray<PipId>.Empty,
        untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
        untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
        tags: ReadOnlyArray<StringId>.Empty,
        successExitCodes: ReadOnlyArray<int>.Empty,
        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
        provenance: PipProvenance.CreateDummy(context),
        toolDescription: StringId.Invalid,
        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty));
}
/// <summary>
/// The returned process will probe and repeat the content of outputToCheck into resultPath. It will then overwrite outputToCheck.
/// Both <paramref name="outputToCheck"/> and <paramref name="resultPath"/> are declared as outputs,
/// so the pip reads its own previous output — used to exercise preserve-outputs behavior.
/// </summary>
/// <param name="context">Pip execution context.</param>
/// <param name="directoryRoot">Working directory for the process.</param>
/// <param name="outputToCheck">Output file whose previous content is copied and then replaced with 'hi'.</param>
/// <param name="resultPath">Output file receiving the copied content; its parent is the standard directory.</param>
/// <param name="allowPreserveOutputs">When true, the pip is created with <see cref="Process.Options.AllowPreserveOutputs"/>.</param>
private static Process CreatePreviousOutputCheckerProcess(PipExecutionContext context, AbsolutePath directoryRoot, AbsolutePath outputToCheck, AbsolutePath resultPath, bool allowPreserveOutputs)
{
    var pathTable = context.PathTable;
    FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
    return AssignFakePipId(new Process(
        executable: exe,
        workingDirectory: directoryRoot,
        arguments: PipDataBuilder.CreatePipData(
            context.StringTable,
            " ",
            PipDataFragmentEscaping.NoEscaping,
            // {0} = outputToCheck, {1} = resultPath: copy previous content of {0} into {1}, then clobber {0}.
            string.Format(OperatingSystemHelper.IsUnixOS ? "-c \"/bin/cat {0} > {1}; echo 'hi' > {0}\"" : "/d /c type {0} 1> {1} 2>&1 & echo 'hi' > {0}",
                outputToCheck.ToString(context.PathTable),
                resultPath.ToString(context.PathTable))
            ),
        responseFile: FileArtifact.Invalid,
        responseFileData: PipData.Invalid,
        environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
        standardInput: FileArtifact.Invalid,
        standardOutput: FileArtifact.Invalid,
        standardError: FileArtifact.Invalid,
        standardDirectory: resultPath.GetParent(pathTable),
        warningTimeout: null,
        timeout: null,
        dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(new FileArtifact[] { exe }),
        outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(
            new FileArtifact(outputToCheck).CreateNextWrittenVersion().WithAttributes(),
            new FileArtifact(resultPath).CreateNextWrittenVersion().WithAttributes()),
        directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
        directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
        orderDependencies: ReadOnlyArray<PipId>.Empty,
        untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
        untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
        tags: ReadOnlyArray<StringId>.Empty,
        successExitCodes: ReadOnlyArray<int>.Empty,
        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
        provenance: PipProvenance.CreateDummy(context),
        toolDescription: StringId.Invalid,
        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
        options: allowPreserveOutputs ? Process.Options.AllowPreserveOutputs : Process.Options.None));
}
/// <summary>
/// Creates a process pip that runs the given shell <paramref name="script"/>
/// (via '-c "..."' on Unix; '/d /c ...' via cmd on Windows).
/// </summary>
/// <param name="context">Pip execution context.</param>
/// <param name="script">Shell script to execute; must be non-empty.</param>
/// <param name="workingDirectory">Working directory for the process.</param>
/// <param name="standardDirectory">Directory for standard output/error redirection files.</param>
/// <param name="environmentVariables">Optional environment variables.</param>
/// <param name="dependencies">Optional file dependencies; the shell executable is always added.</param>
/// <param name="outputs">Optional declared file outputs.</param>
/// <param name="directoryDependencies">Optional directory dependencies.</param>
/// <param name="directoryOutputs">Optional directory outputs.</param>
/// <param name="untrackedPaths">Optional untracked paths; defaults to <see cref="CmdHelper.GetCmdDependencies"/> when null.</param>
/// <param name="untrackedScopes">Optional untracked scopes; defaults to <see cref="CmdHelper.GetCmdDependencyScopes"/> when null.</param>
/// <param name="successExitCodes">Optional exit codes treated as success.</param>
/// <param name="options">Process options for the pip.</param>
private static Process CreateCmdProcess(
    PipExecutionContext context,
    string script,
    AbsolutePath workingDirectory,
    AbsolutePath standardDirectory,
    IReadOnlyList<EnvironmentVariable> environmentVariables = null,
    IReadOnlyList<FileArtifact> dependencies = null,
    IReadOnlyList<FileArtifactWithAttributes> outputs = null,
    IReadOnlyList<DirectoryArtifact> directoryDependencies = null,
    IReadOnlyList<DirectoryArtifact> directoryOutputs = null,
    IReadOnlyList<AbsolutePath> untrackedPaths = null,
    IReadOnlyList<AbsolutePath> untrackedScopes = null,
    IReadOnlyList<int> successExitCodes = null,
    Process.Options options = Process.Options.None)
{
    Contract.Requires(!string.IsNullOrWhiteSpace(script));

    var pathTable = context.PathTable;

    FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
    return AssignFakePipId(new Process(
        executable: exe,
        workingDirectory: workingDirectory,
        arguments: PipDataBuilder.CreatePipData(
            pathTable.StringTable,
            " ",
            PipDataFragmentEscaping.NoEscaping,
            OperatingSystemHelper.IsUnixOS ? I($"-c \"{script}\"") : "/d /c " + script
            ),
        responseFile: FileArtifact.Invalid,
        responseFileData: PipData.Invalid,
        environmentVariables: FromReadOnlyList(environmentVariables),
        standardInput: FileArtifact.Invalid,
        standardOutput: FileArtifact.Invalid,
        standardError: FileArtifact.Invalid,
        standardDirectory: standardDirectory,
        warningTimeout: null,
        timeout: null,
        dependencies: FromReadOnlyList(dependencies, exe),
        outputs: FromReadOnlyList(outputs),
        directoryDependencies: FromReadOnlyList(directoryDependencies),
        directoryOutputs: FromReadOnlyList(directoryOutputs),
        orderDependencies: ReadOnlyArray<PipId>.Empty,
        untrackedPaths: FromReadOnlyList(untrackedPaths, CmdHelper.GetCmdDependencies(pathTable).ToArray()),
        untrackedScopes: FromReadOnlyList(untrackedScopes, CmdHelper.GetCmdDependencyScopes(pathTable).ToArray()),
        tags: ReadOnlyArray<StringId>.Empty,
        successExitCodes: FromReadOnlyList(successExitCodes),
        semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
        provenance: PipProvenance.CreateDummy(context),
        toolDescription: StringId.Invalid,
        additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
        options: options));
}
/// <summary>
/// Converts <paramref name="list"/> (possibly null/empty) plus optional <paramref name="additions"/>
/// into a <see cref="ReadOnlyArray{T}"/>, avoiding copies where possible.
/// </summary>
private static ReadOnlyArray<T> FromReadOnlyList<T>(IReadOnlyList<T> list, params T[] additions)
{
    bool hasListItems = list != null && list.Count > 0;
    bool hasAdditions = additions.Length > 0;

    if (!hasListItems)
    {
        // Only the additions (if any) contribute elements.
        return hasAdditions ? ReadOnlyArray<T>.From(additions) : ReadOnlyArray<T>.Empty;
    }

    return hasAdditions
        ? ReadOnlyArray<T>.From(list.Concat(additions))
        : ReadOnlyArray<T>.FromWithoutCopy(list.ToArray());
}
/// <summary>
/// Verifies that a <see cref="CopyFile"/> pip created with
/// <see cref="global::BuildXL.Pips.Operations.CopyFile.Options.OutputsMustRemainWritable"/>
/// materializes its destination as a writable copy (not a hardlink) and stays up to date on re-run.
/// </summary>
[Fact]
public Task CopyFileWritable()
{
    const string Contents = "Matches!";
    const string BadContents = "Anti-Matches!";

    return WithCachingExecutionEnvironment(
        GetFullPath(".cache"),
        async env =>
        {
            env.InMemoryContentCache.ReinitializeRealizationModeTracking();

            string source = GetFullPath("source");
            AbsolutePath sourceAbsolutePath = AbsolutePath.Create(env.Context.PathTable, source);

            string destination = GetFullPath("dest");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);

            // Pre-populate the destination with wrong contents to prove the pip overwrites it.
            File.WriteAllText(destination, BadContents);
            File.WriteAllText(source, Contents);

            var pip = new CopyFile(
                FileArtifact.CreateSourceFile(sourceAbsolutePath),
                FileArtifact.CreateOutputFile(destinationAbsolutePath),
                ReadOnlyArray<StringId>.Empty,
                PipProvenance.CreateDummy(env.Context),
                global::BuildXL.Pips.Operations.CopyFile.Options.OutputsMustRemainWritable);

            var testRunChecker = new TestRunChecker(destination);

            await testRunChecker.VerifySucceeded(env, pip, Contents);

            await testRunChecker.VerifyUpToDate(env, pip, Contents);

            // The file should be writable (i.e., copy realization mode), since we opted out of hardlinking. So we can modify it, and expect the pip to re-run.
            XAssert.AreEqual(FileRealizationMode.Copy, env.InMemoryContentCache.GetRealizationMode(destination));
            env.InMemoryContentCache.ReinitializeRealizationModeTracking();

            XAssert.IsTrue(File.Exists(destination), "File does not exist");

            try
            {
                File.WriteAllText(destination, BadContents);
            }
            catch (UnauthorizedAccessException)
            {
                XAssert.Fail("Failed writing to the destination file. This implies that the outputs were not left writable, despite requesting that via Process.Options.OutputsMustRemainWritable");
            }
        });
}
/// <summary>
/// Verifies that a <see cref="WriteFilePip"/> renders an <see cref="AbsolutePath"/> written as file
/// content according to the requested <see cref="WriteFilePip.PathRenderingOption"/>.
/// </summary>
[Theory]
[InlineData(WriteFilePip.PathRenderingOption.None)]
[InlineData(WriteFilePip.PathRenderingOption.BackSlashes)]
[InlineData(WriteFilePip.PathRenderingOption.EscapedBackSlashes)]
[InlineData(WriteFilePip.PathRenderingOption.ForwardSlashes)]
public Task WriteFileWithPathRenderingOptions(WriteFilePip.PathRenderingOption option)
{
    return WithExecutionEnvironment(
        async env =>
        {
            string destination = GetFullPath("dest");
            AbsolutePath destinationAbsolutePath = AbsolutePath.Create(env.Context.PathTable, destination);
            FileArtifact destinationArtifact = FileArtifact.CreateSourceFile(destinationAbsolutePath).CreateNextWrittenVersion();

            // The pip writes the destination path itself as the file's content, so the rendering
            // option fully determines the separator style that ends up on disk.
            PipData contents = PipDataBuilder.CreatePipData(
                env.Context.StringTable,
                "",
                PipDataFragmentEscaping.NoEscaping,
                destinationAbsolutePath);

            var pip = new WriteFilePip(destinationArtifact, contents, WriteFileEncoding.Utf8, ReadOnlyArray<StringId>.Empty, PipProvenance.CreateDummy(env.Context), new WriteFilePip.Options(option));
            await VerifyPipResult(PipResultStatus.Succeeded, env, pip);

            string actual = File.ReadAllText(destination);
            string expected = option switch
            {
                WriteFilePip.PathRenderingOption.None => destinationAbsolutePath.ToString(env.Context.PathTable),
                WriteFilePip.PathRenderingOption.BackSlashes => destinationAbsolutePath.ToString(env.Context.PathTable, PathFormat.Windows),

                // Each backslash is doubled so the rendered path can be embedded in string literals.
                WriteFilePip.PathRenderingOption.EscapedBackSlashes => destinationAbsolutePath.ToString(env.Context.PathTable, PathFormat.Windows).Replace(@"\", @"\\"),
                WriteFilePip.PathRenderingOption.ForwardSlashes => destinationAbsolutePath.ToString(env.Context.PathTable, PathFormat.Script),
                _ => string.Empty,
            };

            XAssert.AreEqual(expected, actual);
        });
}
        /// <summary>
        /// Creates a process pip that runs a generated shell script which echoes "WARNING"
        /// (and optionally "EXTRA") to standard output. The pip's warning regex matches
        /// "WARNING" (case-insensitive), so executing it yields a warning.
        /// </summary>
        /// <param name="context">Execution context providing path and string tables.</param>
        /// <param name="directory">Working directory for the process.</param>
        /// <param name="output">Path that receives the redirected standard output.</param>
        /// <param name="extraWarningMessage">If true, the script also echoes "EXTRA".</param>
        private Process CreateWarningProcess(PipExecutionContext context, AbsolutePath directory, AbsolutePath output, bool extraWarningMessage = false)
        {
            var pathTable = context.PathTable;
            // Build the script: suppress command echoing on Windows, then emit the warning line(s).
            var command = new StringBuilder();
            if (!OperatingSystemHelper.IsUnixOS)
            {
                command.AppendLine("@echo off");
            }
            command.AppendLine("echo WARNING");
            if (extraWarningMessage)
            {
                command.AppendLine("echo EXTRA");
            }
            // Write the script next to the test outputs and make it executable on Unix (0x1ff == 0777).
            string cmdScript = OperatingSystemHelper.IsUnixOS ? GetFullPath("script.sh") : GetFullPath("script.cmd");
            File.WriteAllText(cmdScript, command.ToString());
            if (OperatingSystemHelper.IsUnixOS)
            {
                chmod(cmdScript, 0x1ff);
            }
            FileArtifact cmdScriptFile = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, cmdScript));
            FileArtifact stdout = FileArtifact.CreateSourceFile(output).CreateNextWrittenVersion();
            // The pip invokes the OS shell, which runs the generated script.
            FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
            return AssignFakePipId(new Process(
                executable: exe,
                workingDirectory: directory,
                arguments: PipDataBuilder.CreatePipData(
                    pathTable.StringTable,
                    " ",
                    PipDataFragmentEscaping.NoEscaping,
                    OperatingSystemHelper.IsUnixOS ? I($"-c \"{cmdScript}\"") : I($"/d /c {cmdScript}")),
                responseFile: FileArtifact.Invalid,
                responseFileData: PipData.Invalid,
                environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
                standardInput: FileArtifact.Invalid,
                standardOutput: stdout,
                standardError: FileArtifact.Invalid,
                standardDirectory: output.GetParent(pathTable),
                warningTimeout: null,
                timeout: null,
                dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe, cmdScriptFile),
                outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(stdout.WithAttributes()),
                directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
                directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
                orderDependencies: ReadOnlyArray<PipId>.Empty,
                // Shell/system locations are untracked so incidental accesses don't cause violations.
                untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
                untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
                tags: ReadOnlyArray<StringId>.Empty,
                successExitCodes: ReadOnlyArray<int>.Empty,
                semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
                provenance: PipProvenance.CreateDummy(context),
                toolDescription: StringId.Invalid,
                additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
                // Any stdout line containing "WARNING" (case-insensitive) is classified as a warning.
                warningRegex: new RegexDescriptor(StringId.Create(context.StringTable, "WARNING"), RegexOptions.IgnoreCase)));
        }
private static string[] GenerateTestErrorMessages(int errorMessageLength)
{
if (errorMessageLength <= 0)
{
return new string[0];
}
List<string> toEcho = new List<string>();
StringBuilder sb = new StringBuilder(errorMessageLength);
for (int i = 0; i < errorMessageLength; i++)
{
if (i == errorMessageLength - 1)
{
sb.Append('Z');
}
else if (i > 4 && i % (i / 4) == 0)
{
toEcho.Add(sb.ToString());
sb.Clear();
}
else if (i == 0)
{
sb.Append('A');
}
else
{
sb.Append('M');
}
}
toEcho.Add(sb.ToString());
return toEcho.ToArray();
}
        /// <summary>
        /// This method returns a process that will fail. It produces multi-line output with custom error regex (or default, if not provided).
        /// </summary>
        /// <param name="context">Execution context providing path and string tables.</param>
        /// <param name="directory">Working directory for the process.</param>
        /// <param name="output">Path that receives the redirected standard output.</param>
        /// <param name="errorPattern">Optional case-insensitive regex used to classify error lines; default regex if null.</param>
        /// <param name="errorMessageLength">Total character budget used to synthesize error lines (see GenerateTestErrorMessages).</param>
        /// <param name="scriptContent">Optional explicit script body; when null, a script echoing ERROR/WARNING lines is generated.</param>
        private Process CreateErrorProcess(
            PipExecutionContext context,
            AbsolutePath directory,
            AbsolutePath output,
            string errorPattern = null,
            int errorMessageLength = 0,
            string scriptContent = null)
        {
            var pathTable = context.PathTable;
            RegexDescriptor errorRegex = errorPattern == null
                ? default
                : new RegexDescriptor(StringId.Create(context.StringTable, errorPattern), RegexOptions.IgnoreCase);
            if (scriptContent == null)
            {
                // Generate a script that prints one bare "ERROR" line, a chunked error payload,
                // and a trailing "WARNING" line.
                string[] errorMessages = GenerateTestErrorMessages(errorMessageLength);
                StringBuilder command = new StringBuilder();
                if (!OperatingSystemHelper.IsUnixOS)
                {
                    command.AppendLine("@echo off");
                }
                command.AppendLine(I($"echo ERROR "));
                foreach (var errorMessage in errorMessages)
                {
                    command.AppendLine(I($"echo ERROR {errorMessage}"));
                }
                command.AppendLine("echo WARNING");
                scriptContent = command.ToString();
            }
            // Write the script and make it executable on Unix (0x1ff == 0777).
            string cmdScript = OperatingSystemHelper.IsUnixOS ? GetFullPath("script.sh") : GetFullPath("script.cmd");
            File.WriteAllText(cmdScript, scriptContent);
            if (OperatingSystemHelper.IsUnixOS)
            {
                chmod(cmdScript, 0x1ff);
            }
            FileArtifact cmdScriptFile = FileArtifact.CreateSourceFile(AbsolutePath.Create(context.PathTable, cmdScript));
            FileArtifact stdout = FileArtifact.CreateSourceFile(output).CreateNextWrittenVersion();
            FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
            return AssignFakePipId(
                new Process(
                    executable: exe,
                    workingDirectory: directory,
                    arguments: PipDataBuilder.CreatePipData(
                        pathTable.StringTable,
                        " ",
                        PipDataFragmentEscaping.NoEscaping,
                        OperatingSystemHelper.IsUnixOS ? I($"-c \"{cmdScript}\"") : I($"/d /c {cmdScript}")
                        ),
                    responseFile: FileArtifact.Invalid,
                    responseFileData: PipData.Invalid,
                    environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
                    standardInput: FileArtifact.Invalid,
                    standardOutput: stdout,
                    standardError: FileArtifact.Invalid,
                    standardDirectory: output.GetParent(pathTable),
                    warningTimeout: null,
                    timeout: null,
                    dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe, cmdScriptFile),
                    outputs: ReadOnlyArray<FileArtifactWithAttributes>.FromWithoutCopy(stdout.WithAttributes()),
                    directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
                    directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
                    orderDependencies: ReadOnlyArray<PipId>.Empty,
                    untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
                    untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
                    tags: ReadOnlyArray<StringId>.Empty,
                    // Only exit code 777 counts as success, so the script's normal completion
                    // is treated as a pip failure.
                    successExitCodes: ReadOnlyArray<int>.From(new[] { 777 }),
                    semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
                    provenance: PipProvenance.CreateDummy(context),
                    toolDescription: StringId.Invalid,
                    additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty,
                    errorRegex: errorRegex));
        }
        /// <summary>
        /// Creates a process pip that invokes the OS shell to echo <paramref name="param"/> and
        /// declares each path in <paramref name="destinations"/> as a required output.
        /// (The shell command itself only echoes; it does not write the declared output files.)
        /// </summary>
        /// <param name="context">Execution context providing path and string tables.</param>
        /// <param name="source">Path whose parent becomes the working directory.</param>
        /// <param name="destinations">Paths declared as required outputs of the pip.</param>
        /// <param name="pipId">Fake pip id to assign to the created pip.</param>
        /// <param name="param">Text echoed by the shell command.</param>
        private static Process CreateEchoProcess(PipExecutionContext context, AbsolutePath source, List<AbsolutePath> destinations, uint pipId, string param)
        {
            var pathTable = context.PathTable;
            FileArtifact exe = FileArtifact.CreateSourceFile(AbsolutePath.Create(pathTable, CmdHelper.OsShellExe));
            // Every destination becomes a required (written) output artifact.
            List<FileArtifactWithAttributes> outputs = new List<FileArtifactWithAttributes>();
            foreach (AbsolutePath destination in destinations)
            {
                outputs.Add(FileArtifactWithAttributes.Create(FileArtifact.CreateSourceFile(destination), FileExistence.Required).CreateNextWrittenVersion());
            }
            return AssignFakePipId(new Process(
                executable: exe,
                workingDirectory: source.GetParent(pathTable),
                arguments: PipDataBuilder.CreatePipData(
                    pathTable.StringTable,
                    " ",
                    PipDataFragmentEscaping.NoEscaping,
                    OperatingSystemHelper.IsUnixOS
                    ? "-c ' echo " + param + " '"
                    : "/d /c echo " + param),
                responseFile: FileArtifact.Invalid,
                responseFileData: PipData.Invalid,
                environmentVariables: ReadOnlyArray<EnvironmentVariable>.Empty,
                standardInput: FileArtifact.Invalid,
                standardOutput: FileArtifact.Invalid,
                standardError: FileArtifact.Invalid,
                standardDirectory: FileArtifact.CreateSourceFile(destinations[0].GetParent(pathTable)).CreateNextWrittenVersion(),
                warningTimeout: null,
                timeout: null,
                dependencies: ReadOnlyArray<FileArtifact>.FromWithoutCopy(exe),
                outputs: ReadOnlyArray<FileArtifactWithAttributes>.From(outputs),
                directoryDependencies: ReadOnlyArray<DirectoryArtifact>.Empty,
                directoryOutputs: ReadOnlyArray<DirectoryArtifact>.Empty,
                orderDependencies: ReadOnlyArray<PipId>.Empty,
                untrackedPaths: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencies(pathTable)),
                untrackedScopes: ReadOnlyArray<AbsolutePath>.From(CmdHelper.GetCmdDependencyScopes(pathTable)),
                tags: ReadOnlyArray<StringId>.Empty,
                successExitCodes: ReadOnlyArray<int>.Empty,
                semaphores: ReadOnlyArray<ProcessSemaphoreInfo>.Empty,
                provenance: PipProvenance.CreateDummy(context),
                toolDescription: StringId.Invalid,
                additionalTempDirectories: ReadOnlyArray<AbsolutePath>.Empty), pipId);
        }
        /// <summary>
        /// Assigns a fixed, fake <see cref="PipId"/> to <paramref name="pip"/> so it can be executed
        /// outside a real pip graph, and returns the same pip instance for convenient chaining.
        /// </summary>
        private static TPip AssignFakePipId<TPip>(TPip pip, uint pipId = 123) where TPip : Pip
        {
            pip.PipId = new PipId(pipId);
            return pip;
        }
/// <summary>
/// Helper class that keeps track of the counters in the env.
/// </summary>
private class TestRunChecker
{
private int m_expectedRunCount;
private int m_expectedUpToDateCount;
private int m_expectedDeployedFromCache;
private int m_expectedWarningCount;
private int m_expectedWarningFromCacheCount;
private string m_destination;
public TestRunChecker(string destination = null)
{
m_destination = destination;
}
public string ExpectedContentsSuffix { get; set; }
public void ExpectWarning(int nrOfWarnigns = 1)
{
m_expectedWarningCount += nrOfWarnigns;
}
public void ExpectWarningFromCache(int nrOfWarnigns = 1)
{
m_expectedWarningFromCacheCount += nrOfWarnigns;
}
public Task VerifySucceeded(DummyPipExecutionEnvironment env, Pip pip, string expectedContents = null, bool expectMarkedPerpetuallyDirty = false)
{
Contract.Assert((m_destination == null) == (expectedContents == null), "must specify expected contents if you set a destination file to check");
m_expectedRunCount++;
return Verify(PipResultStatus.Succeeded, env, pip, expectedContents, expectMarkedPerpetuallyDirty);
}
public Task VerifyUpToDate(DummyPipExecutionEnvironment env, Pip pip, string expectedContents = null, bool expectMarkedPerpertuallyDirty = false)
{
Contract.Assert((m_destination == null) == (expectedContents == null), "must specify expected contents if you set a destination file to check");
m_expectedUpToDateCount++;
return Verify(PipResultStatus.UpToDate, env, pip, expectedContents, expectMarkedPerpertuallyDirty);
}
public Task VerifyFailed(DummyPipExecutionEnvironment env, Pip pip, string expectedContents = null, bool expectMarkedPerpertuallyDirty = false)
{
return Verify(PipResultStatus.Failed, env, pip, expectedContents, expectMarkedPerpertuallyDirty);
}
public Task VerifyDeployedFromCache(DummyPipExecutionEnvironment env, Pip pip, string expectedContents = null)
{
m_expectedDeployedFromCache++;
return Verify(PipResultStatus.DeployedFromCache, env, pip, expectedContents);
}
public async Task Verify(PipResultStatus expectedStatus, DummyPipExecutionEnvironment env, Pip pip, string expectedContents = null, bool expectMarkedPerpertuallyDirty = false)
{
Contract.Assert((m_destination == null) == (expectedContents == null), "must specify expected contents if you set a destination file to check");
await VerifyPipResult(expectedStatus, env, pip, expectMarkedPerpetuallyDirty: expectMarkedPerpertuallyDirty);
XAssert.AreEqual(m_expectedRunCount, env.OutputFilesProduced, "produced count");
XAssert.AreEqual(m_expectedUpToDateCount, env.OutputFilesUpToDate, "up to date count");
XAssert.AreEqual(m_expectedDeployedFromCache, env.OutputFilesDeployedFromCache, "deployed from cache count");
XAssert.AreEqual(m_expectedWarningFromCacheCount, env.WarningsFromCache);
XAssert.AreEqual(m_expectedWarningFromCacheCount, env.PipsWithWarningsFromCache);
XAssert.AreEqual(m_expectedWarningCount, env.Warnings);
XAssert.AreEqual(m_expectedWarningCount, env.PipsWithWarnings);
if (m_destination != null)
{
XAssert.AreEqual(
I($"'{expectedContents + ExpectedContentsSuffix}'"),
I($"'{File.ReadAllText(m_destination)}'"),
"File contents of: " + m_destination);
}
}
}
}
    // This class creates a fake ITwoPhaseFingerprintStore implementation to be used for mocking the
    // behavior of duplicated cache entry for this strong fingerprint in the L2 cache.
    // It is necessary to test the convergence of cache content.
    // The first TryPublishCacheEntryAsync call succeeds and records the published entry; every
    // subsequent call reports a conflict with that first entry, simulating a race with a remote
    // publisher.
    internal sealed class TestPipExecutorTwoPhaseFingerprintStore : ITwoPhaseFingerprintStore
    {
        // Fingerprints/hash captured from the first successful publish.
        internal WeakContentFingerprint M_weakFingerprint = WeakContentFingerprint.Zero;
        internal ContentHash M_pathSetHash = ContentHashingUtilities.ZeroHash;
        internal StrongContentFingerprint M_strongFingerprint = StrongContentFingerprint.Zero;
        // Number of publish attempts observed so far.
        internal int M_publishTries = 0;
        // True after the first publish has been accepted; later publishes conflict.
        internal bool DoneOnce = false;
        internal List<long> M_len = new List<long>(2);
        // Entry recorded on the first publish; also returned by TryGetCacheEntryAsync.
        internal CacheEntry M_cacheEntry;
        internal TestPipExecutorTwoPhaseFingerprintStore()
        {
        }
        // Before anything was published there are no entries; afterwards, pretend the first
        // published entry exists remotely.
        private async Task<Possible<PublishedEntryRef, Failure>> GetOne()
        {
            await FinishedTask;
            if (M_publishTries == 0)
            {
                return PublishedEntryRef.Default;
            }
            return new PublishedEntryRef(M_pathSetHash, M_strongFingerprint, "foobar", PublishedEntryRefLocality.Remote);
        }
        public IEnumerable<Task<Possible<PublishedEntryRef, Failure>>> ListPublishedEntriesByWeakFingerprint(WeakContentFingerprint weak, OperationHints hints)
        {
            yield return GetOne();
        }
        // Always answers with the recorded entry (default-valued until the first publish).
        public Task<Possible<CacheEntry?, Failure>> TryGetCacheEntryAsync(
            WeakContentFingerprint weakFingerprint,
            ContentHash pathSetHash,
            StrongContentFingerprint strongFingerprint,
            OperationHints hints = default)
        {
            return Task.FromResult(new Possible<CacheEntry?, Failure>(M_cacheEntry));
        }
        public async Task<Possible<CacheEntryPublishResult, Failure>> TryPublishCacheEntryAsync(
            WeakContentFingerprint weakFingerprint,
            ContentHash pathSetHash,
            StrongContentFingerprint strongFingerprint,
            CacheEntry entry,
            CacheEntryPublishMode mode = CacheEntryPublishMode.CreateNew,
            PublishCacheEntryOptions options = default)
        {
            await FinishedTask;
            if (M_publishTries == 0)
            {
                // Remember what was published first; later attempts conflict with it.
                M_weakFingerprint = weakFingerprint;
                M_pathSetHash = pathSetHash;
                M_strongFingerprint = strongFingerprint;
                M_cacheEntry = entry;
            }
            M_publishTries++;
            if (!DoneOnce)
            {
                DoneOnce = true;
                return CacheEntryPublishResult.CreatePublishedResult();
            }
            return CacheEntryPublishResult.CreateConflictResult(M_cacheEntry);
        }
        // This is finished task that is used to prevent compilation warning in async method with no reasonable await.
        private static Task FinishedTask { get; } = Task.FromResult(42);
    }
}
| 54.102665 | 242 | 0.595745 | [
"MIT"
] | Microsoft/BuildXL | Public/Src/Engine/UnitTests/Scheduler/PipExecutorTest.cs | 192,876 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Management.Automation;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Linq;
namespace PowerShellCourse
{
[Cmdlet(VerbsCommon.Get, "SnowReport")]
public class GetSnowReportCommand : PSCmdlet
{
[Parameter(Position = 1, ValueFromPipeline = true)]
public string Country { get; set; }
protected override void ProcessRecord()
{
XDocument feed = GetFeed();
if (feed == null)
{
return;
}
var items = feed.Descendants("item");
foreach (var item in items)
{
var report = SnowReport.Create(item);
WriteObject(report);
}
}
private XDocument GetFeed()
{
XDocument feed = null;
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(string.Format("http://www.onthesnow.co.uk/" + Country + "/snow-rss.html"));
using (var response = request.GetResponse())
{
using (var stream = response.GetResponseStream())
{
using (StreamReader reader = new StreamReader(stream, Encoding.UTF8))
{
string content = reader.ReadToEnd();
feed = XDocument.Parse(content);
}
}
}
return feed;
}
}
} | 25.538462 | 140 | 0.621988 | [
"MIT"
] | psymonn/myPowerShell | Sample Modules/extending-powershell/materials/4-extending-powershell-m3-output-exercise-files/after/GetSnowReportCommand.cs | 1,330 | C# |
using ChromeCast.Classes;
using ChromeCast.Device.Classes;
using ChromeCast.Device.Log.Interfaces;
using ChromeCast.Device.ProtocolBuffer;
using System.Text.Json;
using System;
using System.Threading.Tasks;
namespace ChromeCast.Device.Application
{
    /// <summary>
    /// Receiver-side handling of Chromecast (CASTV2) messages for one connection:
    /// deserializes incoming <see cref="CastMessage"/> payloads, tracks the playback
    /// state machine, and writes status replies back through the <see cref="DeviceListener"/>.
    /// </summary>
    public class DeviceCommunication
    {
        // Current receiver/playback state; starts out disconnected.
        private DeviceState state = DeviceState.Closed;
        private readonly ILogger logger;
        // Wall-clock moment playback started; used to report elapsed seconds.
        private DateTime playerPlayTime;
        public DeviceCommunication(ILogger loggerIn)
        {
            logger = loggerIn;
        }
        /// <summary>
        /// Dispatches one incoming cast message on its JSON "type" field and sends the
        /// media/receiver status replies the sender expects.
        /// </summary>
        public void ProcessMessage(DeviceListener deviceListener, CastMessage castMessage)
        {
            // IncludeFields: the payload DTOs expose public fields rather than properties.
            var options = new JsonSerializerOptions { IncludeFields = true };
            var message = JsonSerializer.Deserialize<PayloadMessageBase>(castMessage.PayloadUtf8, options);
            switch (message.type)
            {
                case "SET_VOLUME":
                    // The payload is either a mute toggle or a volume level; distinguish
                    // by the presence of the "muted" key in the raw JSON.
                    if (castMessage.PayloadUtf8.Contains("muted", System.StringComparison.CurrentCulture))
                    {
                        var volumeMuteMessage = JsonSerializer.Deserialize<MessageVolumeMute>(castMessage.PayloadUtf8, options);
                        SystemCalls.SetMute(volumeMuteMessage.volume.muted);
                        deviceListener.Write(ChromeCastMessages.MediaStatusMessage(volumeMuteMessage.requestId, state, SecondsPlaying()), state);
                        deviceListener.Write(ChromeCastMessages.ReceiverStatusMessage(volumeMuteMessage.requestId), state);
                    }
                    else
                    {
                        var volumeMessage = JsonSerializer.Deserialize<MessageVolume>(castMessage.PayloadUtf8, options);
                        SystemCalls.SetVolume(volumeMessage.volume.level);
                        deviceListener.Write(ChromeCastMessages.MediaStatusMessage(volumeMessage.requestId, state, SecondsPlaying()), state);
                        deviceListener.Write(ChromeCastMessages.ReceiverStatusMessage(volumeMessage.requestId), state);
                    }
                    break;
                case "CONNECT":
                    state = DeviceState.Connected;
                    break;
                case "CLOSE":
                    state = DeviceState.Closed;
                    var closeMessage = JsonSerializer.Deserialize<MessageStop>(castMessage.PayloadUtf8, options);
                    deviceListener.Write(ChromeCastMessages.MediaStatusMessage(closeMessage.requestId, state, 0), state);
                    break;
                case "LAUNCH":
                    state = DeviceState.Launching;
                    var launchMessage = JsonSerializer.Deserialize<MessageLaunch>(castMessage.PayloadUtf8, options);
                    deviceListener.Write(ChromeCastMessages.ReceiverStatusMessage(launchMessage.requestId), state);
                    break;
                case "LOAD":
                    // Start playback of the requested content, then report Loading followed
                    // (after a fixed delay) by Buffering.
                    state = DeviceState.Loading;
                    var loadMessage = JsonSerializer.Deserialize<MessageLoad>(castMessage.PayloadUtf8, options);
                    logger.Log($"[{state}] Start playing: {loadMessage?.media?.contentId}");
                    SystemCalls.StartPlaying(loadMessage.media.contentId);
                    playerPlayTime = DateTime.Now;
                    deviceListener.Write(ChromeCastMessages.MediaStatusMessage(loadMessage.requestId, state, SecondsPlaying()), state);
                    state = DeviceState.Buffering;
                    // NOTE(review): blocking wait on the message-processing path — confirm intended.
                    Task.Delay(2000).Wait();
                    deviceListener.Write(ChromeCastMessages.MediaStatusMessage(loadMessage.requestId, state, SecondsPlaying()), state);
                    break;
                case "PAUSE":
                    state = DeviceState.Paused;
                    var pauseMessage = JsonSerializer.Deserialize<MessagePause>(castMessage.PayloadUtf8, options);
                    deviceListener.Write(ChromeCastMessages.MediaStatusMessage(pauseMessage.requestId, state, SecondsPlaying()), state);
                    break;
                case "PLAY":
                    break;
                case "STOP":
                    state = DeviceState.Idle;
                    var stopMessage = JsonSerializer.Deserialize<MessageStop>(castMessage.PayloadUtf8, options);
                    deviceListener.Write(ChromeCastMessages.MediaStatusMessage(stopMessage.requestId, state, 0), state);
                    SystemCalls.StopPlaying();
                    break;
                case "PING":
                    break;
                case "PONG":
                    break;
                case "GET_STATUS":
                    var getstatusMessage = JsonSerializer.Deserialize<MessageStatus>(castMessage.PayloadUtf8, options);
                    // The first status poll after buffering promotes the state to Playing.
                    if (state== DeviceState.Buffering)
                        state = DeviceState.Playing;
                    switch (state)
                    {
                        case DeviceState.Idle:
                        case DeviceState.Closed:
                        case DeviceState.Connected:
                            deviceListener.Write(ChromeCastMessages.ReceiverStatusMessage(getstatusMessage.requestId), state);
                            break;
                        case DeviceState.Playing:
                            deviceListener.Write(ChromeCastMessages.MediaStatusMessage(getstatusMessage.requestId, state, SecondsPlaying()), state);
                            break;
                        default:
                            deviceListener.Write(ChromeCastMessages.ReceiverStatusMessage(getstatusMessage.requestId), state);
                            break;
                    }
                    break;
                default:
                    // Unknown message type: log it for diagnosis.
                    logger.Log($"in default [{DateTime.Now.ToLongTimeString()}] {message.type} {castMessage.PayloadUtf8}");
                    break;
            }
        }
        /// <summary>
        /// Pushes an unsolicited media status message to the sender.
        /// NOTE(review): the <paramref name="level"/> argument is not used here — the status
        /// message only carries current state/position; confirm this is intended.
        /// </summary>
        public void SendNewVolume(float level, DeviceListener deviceListener)
        {
            deviceListener.Write(ChromeCastMessages.MediaStatusMessage(0, state, SecondsPlaying()), state);
        }
        // Seconds elapsed since playback started; 0 when not playing/loading.
        private float SecondsPlaying()
        {
            if (state == DeviceState.Playing || state == DeviceState.Loading)
            {
                return (float)(DateTime.Now - playerPlayTime).TotalSeconds;
            }
            else
            {
                return 0;
            }
        }
    }
    /// <summary>
    /// Receiver/playback states tracked by <see cref="DeviceCommunication"/> and reported
    /// back to the cast sender.
    /// </summary>
    public enum DeviceState
    {
        Closed,     // Initial state; also entered on CLOSE.
        Connected,  // Entered on CONNECT.
        Launching,  // Entered on LAUNCH.
        Loading,    // Entered on LOAD, before buffering starts.
        Buffering,  // After LOAD; promoted to Playing on the next GET_STATUS.
        Playing,
        Paused,     // Entered on PAUSE.
        Idle,       // Entered on STOP.
    }
| 45.613793 | 148 | 0.568189 | [
"MIT"
] | SamDel/ChromeCast-Device | Source/ChromeCast.Device/Application/DeviceCommunication.cs | 6,616 | C# |
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading.Tasks;
using Pulumi.Serialization;
namespace Pulumi.AzureNextGen.Devices.V20200831Preview.Inputs
{
    /// <summary>
    /// The properties of an IoT hub shared access policy.
    /// </summary>
    /// <remarks>
    /// Produced by the Pulumi SDK generator (see the file header); hand edits will be lost
    /// on regeneration.
    /// </remarks>
    public sealed class SharedAccessSignatureAuthorizationRuleArgs : Pulumi.ResourceArgs
    {
        /// <summary>
        /// The name of the shared access policy.
        /// </summary>
        [Input("keyName", required: true)]
        public Input<string> KeyName { get; set; } = null!;

        /// <summary>
        /// The primary key.
        /// </summary>
        [Input("primaryKey")]
        public Input<string>? PrimaryKey { get; set; }

        /// <summary>
        /// The permissions assigned to the shared access policy.
        /// </summary>
        [Input("rights", required: true)]
        public Input<Pulumi.AzureNextGen.Devices.V20200831Preview.AccessRights> Rights { get; set; } = null!;

        /// <summary>
        /// The secondary key.
        /// </summary>
        [Input("secondaryKey")]
        public Input<string>? SecondaryKey { get; set; }

        public SharedAccessSignatureAuthorizationRuleArgs()
        {
        }
    }
}
| 30.468085 | 109 | 0.615223 | [
"Apache-2.0"
] | pulumi/pulumi-azure-nextgen | sdk/dotnet/Devices/V20200831Preview/Inputs/SharedAccessSignatureAuthorizationRuleArgs.cs | 1,432 | C# |
using Steamworks;
using System;
using System.Collections;
using UnityEngine;
namespace Mirror.FizzySteam
{
    /// <summary>
    /// Shared base for the legacy Steam P2P transport (client and server sides):
    /// owns the Steamworks session callbacks, sends/receives packets on the configured
    /// channels plus one extra internal control channel, and manages session teardown.
    /// </summary>
    public abstract class LegacyCommon
    {
        // Send modes for the user-data channels, as configured on the transport.
        private EP2PSend[] channels;
        // The control channel index is one past the last user channel.
        private int internal_ch => channels.Length;

        // Single-byte control messages exchanged on the internal channel.
        protected enum InternalMessages : byte
        {
            CONNECT,
            ACCEPT_CONNECT,
            DISCONNECT
        }

        // Steamworks callbacks; kept in fields so they can be disposed.
        private Callback<P2PSessionRequest_t> callback_OnNewConnection = null;
        private Callback<P2PSessionConnectFail_t> callback_OnConnectFail = null;

        protected readonly FizzySteamworks transport;

        protected LegacyCommon(FizzySteamworks transport)
        {
            channels = transport.Channels;

            callback_OnNewConnection = Callback<P2PSessionRequest_t>.Create(OnNewConnection);
            callback_OnConnectFail = Callback<P2PSessionConnectFail_t>.Create(OnConnectFail);

            this.transport = transport;
        }

        // Unregisters the Steamworks callbacks; safe to call more than once.
        protected void Dispose()
        {
            if (callback_OnNewConnection != null)
            {
                callback_OnNewConnection.Dispose();
                callback_OnNewConnection = null;
            }

            if (callback_OnConnectFail != null)
            {
                callback_OnConnectFail.Dispose();
                callback_OnConnectFail = null;
            }
        }

        // Invoked by Steamworks when a remote peer requests a P2P session.
        protected abstract void OnNewConnection(P2PSessionRequest_t result);

        // Maps Steamworks P2P error codes to log messages and tears down the session.
        private void OnConnectFail(P2PSessionConnectFail_t result)
        {
            OnConnectionFailed(result.m_steamIDRemote);
            CloseP2PSessionWithUser(result.m_steamIDRemote);

            switch (result.m_eP2PSessionError)
            {
                case 1:
                    Debug.LogError("Connection failed: The target user is not running the same game.");
                    break;
                case 2:
                    Debug.LogError("Connection failed: The local user doesn't own the app that is running.");
                    break;
                case 3:
                    Debug.LogError("Connection failed: Target user isn't connected to Steam.");
                    break;
                case 4:
                    Debug.LogError("Connection failed: The connection timed out because the target user didn't respond.");
                    break;
                default:
                    Debug.LogError("Connection failed: Unknown error.");
                    break;
            }
        }

        // Sends a one-byte control message reliably on the internal channel.
        protected void SendInternal(CSteamID target, InternalMessages type) => SteamNetworking.SendP2PPacket(target, new byte[] { (byte)type }, 1, EP2PSend.k_EP2PSendReliable, internal_ch);
        // Sends user data on the requested channel with that channel's configured send mode
        // (channel index is clamped to the configured range).
        protected void Send(CSteamID host, byte[] msgBuffer, int channel) => SteamNetworking.SendP2PPacket(host, msgBuffer, (uint)msgBuffer.Length, channels[Mathf.Min(channel, channels.Length - 1)], channel);

        // Reads one pending packet from the given channel, if any.
        private bool Receive(out CSteamID clientSteamID, out byte[] receiveBuffer, int channel)
        {
            if (SteamNetworking.IsP2PPacketAvailable(out uint packetSize, channel))
            {
                receiveBuffer = new byte[packetSize];
                return SteamNetworking.ReadP2PPacket(receiveBuffer, packetSize, out _, out clientSteamID, channel);
            }

            receiveBuffer = null;
            clientSteamID = CSteamID.Nil;
            return false;
        }

        protected void CloseP2PSessionWithUser(CSteamID clientSteamID) => SteamNetworking.CloseP2PSessionWithUser(clientSteamID);

        // Closes the session on the next frame when possible (so queued sends can flush);
        // closes immediately if the transport component is disabled.
        protected void WaitForClose(CSteamID cSteamID)
        {
            if (transport.enabled)
            {
                transport.StartCoroutine(DelayedClose(cSteamID));
            }
            else
            {
                CloseP2PSessionWithUser(cSteamID);
            }
        }

        private IEnumerator DelayedClose(CSteamID cSteamID)
        {
            yield return null;
            CloseP2PSessionWithUser(cSteamID);
        }

        /// <summary>
        /// Pumps all pending packets: control messages first (one per call), then all
        /// queued user-data packets on every channel.
        /// </summary>
        public void ReceiveData()
        {
            try
            {
                while (transport.enabled && Receive(out CSteamID clientSteamID, out byte[] internalMessage, internal_ch))
                {
                    if (internalMessage.Length == 1)
                    {
                        OnReceiveInternalData((InternalMessages)internalMessage[0], clientSteamID);
                        return; // Wait one frame
                    }
                    else
                    {
                        Debug.Log("Incorrect package length on internal channel.");
                    }
                }

                for (int chNum = 0; chNum < channels.Length; chNum++)
                {
                    while (transport.enabled && Receive(out CSteamID clientSteamID, out byte[] receiveBuffer, chNum))
                    {
                        OnReceiveData(receiveBuffer, clientSteamID, chNum);
                    }
                }
            }
            catch (Exception e)
            {
                Debug.LogException(e);
            }
        }

        protected abstract void OnReceiveInternalData(InternalMessages type, CSteamID clientSteamID);
        protected abstract void OnReceiveData(byte[] data, CSteamID clientSteamID, int channel);
        protected abstract void OnConnectionFailed(CSteamID remoteId);
    }
} | 30.711409 | 204 | 0.664336 | [
"MIT"
] | BrendanRobins97/MirrorTesting | Assets/Mirror/FizzySteamyMirror/LegacyCommon.cs | 4,578 | C# |
/*
// <copyright>
// dotNetRDF is free and open source software licensed under the MIT License
// -------------------------------------------------------------------------
//
// Copyright (c) 2009-2017 dotNetRDF Project (http://dotnetrdf.org/)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is furnished
// to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json.Linq;
using VDS.RDF.JsonLd;
using VDS.RDF.Parsing;
namespace VDS.RDF.Writing
{
/// <summary>
/// Class for serializing a Triple Store in JSON-LD syntax
/// </summary>
public partial class JsonLdWriter : IStoreWriter
{
private readonly JsonLdWriterOptions _options;
        /// <summary>
        /// Create a new serializer with default serialization options.
        /// </summary>
        public JsonLdWriter()
        {
            _options = new JsonLdWriterOptions();
        }
        /// <summary>
        /// Create a new serializer with the specified serialization options.
        /// </summary>
        /// <param name="options">Controls serialization behavior such as JSON output formatting and rdf:type handling.</param>
        public JsonLdWriter(JsonLdWriterOptions options)
        {
            _options = options;
        }
/// <inheritdoc/>
public void Save(ITripleStore store, string filename)
{
var jsonArray = SerializeStore(store);
using (var writer = new StreamWriter(File.Open(filename, FileMode.Create, FileAccess.Write),
Encoding.UTF8))
{
writer.Write(jsonArray);
}
}
        /// <inheritdoc/>
        public void Save(ITripleStore store, TextWriter output)
        {
            // Delegate to the three-argument overload; the writer is closed when done.
            Save(store, output, false);
        }
/// <inheritdoc/>
public void Save(ITripleStore store, TextWriter output, bool leaveOpen)
{
var jsonArray = SerializeStore(store);
output.Write(jsonArray.ToString(_options.JsonFormatting));
output.Flush();
if (!leaveOpen)
{
output.Close();
}
}
/// <summary>
/// Serialize a Triple Store to an expanded JSON-LD document
/// </summary>
/// <param name="store"></param>
/// <returns></returns>
public JArray SerializeStore(ITripleStore store)
{
// 1 - Initialize default graph to an empty dictionary.
var defaultGraph = new Dictionary<string, JObjectWithUsages>();
// 2 - Initialize graph map to a dictionary consisting of a single member @default whose value references default graph.
var graphMap = new Dictionary<string, Dictionary<string, JObjectWithUsages>>{{"@default", defaultGraph}};
// 3 - Initialize node usages map to an empty dictionary.
var nodeUsagesMap = new Dictionary<string, JArray>();
// 4 - For each graph in RDF dataset:
foreach (var graph in store.Graphs)
{
// 4.1 - If graph is the default graph, set name to @default, otherwise to the graph name associated with graph.
string name = graph.BaseUri == null ? "@default" : graph.BaseUri.ToString();
// 4.2 - If graph map has no name member, create one and set its value to an empty dictionary.
if (!graphMap.ContainsKey(name))
{
graphMap.Add(name, new Dictionary<string, JObjectWithUsages>());
}
// 4.3 - If graph is not the default graph and default graph does not have a name member, create such a member and initialize its value to a new dictionary with a single member @id whose value is name.
if (name != "@default")
{
if (!defaultGraph.ContainsKey(name))
{
defaultGraph.Add(name, new JObjectWithUsages(new JProperty("@id", name)));
}
}
// 4.4 - Reference the value of the name member in graph map using the variable node map.
var nodeMap = graphMap[name];
// 4.5 - For each RDF triple in graph consisting of subject, predicate, and object:
foreach (var triple in graph.Triples)
{
var subject = MakeNodeString(triple.Subject);
var predicate = MakeNodeString(triple.Predicate);
// 4.5.1 - If node map does not have a subject member, create one and initialize its value to a new dictionary consisting of a single member @id whose value is set to subject.
if (!nodeMap.ContainsKey(subject))
{
nodeMap.Add(subject, new JObjectWithUsages(new JProperty("@id", subject)));
}
// 4.5.2 - Reference the value of the subject member in node map using the variable node.
var node = nodeMap[subject];
// 4.5.3 - If object is an IRI or blank node identifier, and node map does not have an object member,
// create one and initialize its value to a new dictionary consisting of a single member @id whose value is set to object.
if (triple.Object is IUriNode || triple.Object is IBlankNode)
{
var obj = MakeNodeString(triple.Object);
if (!nodeMap.ContainsKey(obj))
{
nodeMap.Add(obj, new JObjectWithUsages(new JProperty("@id", obj)));
}
}
// 4.5.4 - If predicate equals rdf:type, the use rdf:type flag is not true, and object is an IRI or blank node identifier,
// append object to the value of the @type member of node; unless such an item already exists.
// If no such member exists, create one and initialize it to an array whose only item is object.
// Finally, continue to the next RDF triple.
if (predicate.Equals(RdfSpecsHelper.RdfType) && !_options.UseRdfType &&
(triple.Object is IUriNode || triple.Object is IBlankNode))
{
if (node.Property("@type") == null)
{
node.Add("@type", new JArray(MakeNodeString(triple.Object)));
}
else
{
(node["@type"] as JArray).Add(MakeNodeString(triple.Object));
}
continue;
}
// 4.5.5 - Set value to the result of using the RDF to Object Conversion algorithm, passing object and use native types.
var value = RdfToObject(triple.Object);
// 4.5.6 - If node does not have an predicate member, create one and initialize its value to an empty array.
if (node.Property(predicate) == null)
{
node.Add(predicate, new JArray());
}
// 4.5.7 - If there is no item equivalent to value in the array associated with the predicate member of node, append a reference to value to the array. Two JSON objects are considered equal if they have equivalent key-value pairs.
AppendUniqueElement(value, node[predicate] as JArray);
// 4.5.8 - If object is a blank node identifier or IRI, it might represent the list node:
if (triple.Object is IBlankNode || triple.Object is IUriNode)
{
// 4.5.8.1
var obj = MakeNodeString(triple.Object);
if (!nodeUsagesMap.ContainsKey(obj))
{
nodeUsagesMap.Add(obj, new JArray());
}
// 4.5.8.2
// AppendUniqueElement(node["@id"], nodeUsagesMap[obj] as JArray);
// KA - looks like a bug in the spec, if we don't add duplicate entries then this map does not correctly detect when a list node is referred to by the same subject in different statements
(nodeUsagesMap[obj]).Add(node["@id"]);
// 4.8.5.4
nodeMap[obj].Usages.Add(new Usage(node, predicate, value));
}
}
}
// 5 - For each name and graph object in graph map:
foreach (var gp in graphMap)
{
var graphObject = gp.Value;
// 5.1 - If graph object has no rdf:nil member, continue with the next name-graph object pair as the graph does not contain any lists that need to be converted.
if (!graphObject.ContainsKey(RdfSpecsHelper.RdfListNil))
{
continue;
}
// 5.2 - Initialize nil to the value of the rdf:nil member of graph object.
var nil = graphObject[RdfSpecsHelper.RdfListNil];
// 5.3 - For each item usage in the usages member of nil, perform the following steps:
var nilUsages = nil.Usages;
if (nilUsages != null)
{
foreach (var usage in nilUsages)
{
// 5.3.1 - Initialize node to the value of the value of the node member of usage,
// property to the value of the property member of usage, and head to the value of the value member of usage.
var node = usage.Node;
var property = usage.Property;
var head = usage.Value as JObject;
// 5.3.2 - Initialize two empty arrays list and list nodes.
var list = new JArray();
var listNodes = new JArray();
// 5.3.3 - While property equals rdf:rest, the array value of the member of node usages map associated with the
// @id member of node has only one member, the value associated to the usages member of node has exactly 1 entry,
// node has a rdf:first and rdf:rest property, both of which have as value an array consisting of a single element,
// and node has no other members apart from an optional @type member whose value is an array with a single item equal
// to rdf:List, node represents a well-formed list node.
// Perform the following steps to traverse the list backwards towards its head:
while (IsWellFormedListNode(node, property, nodeUsagesMap))
{
// 5.3.3.1 - Append the only item of rdf:first member of node to the list array.
list.Add((node[RdfSpecsHelper.RdfListFirst] as JArray)[0]);
// 5.3.3.2 - Append the value of the @id member of node to the list nodes array.
listNodes.Add(node["@id"]);
// 5.3.3.3 - Initialize node usage to the only item of the usages member of node.
var nodeUsage = node.Usages[0];
// 5.3.3.4 - Set node to the value of the node member of node usage, property to the value of the property member of node usage, and head to the value of the value member of node usage.
node = nodeUsage.Node;
property = nodeUsage.Property;
head = nodeUsage.Value as JObject;
// 5.3.3.5 - If the @id member of node is an IRI instead of a blank node identifier, exit the while loop.
if (!JsonLdProcessor.IsBlankNodeIdentifier(node["@id"].Value<string>())) break;
}
// 5.3.4 - If property equals rdf:first, i.e., the detected list is nested inside another list
if (property.Equals(RdfSpecsHelper.RdfListFirst))
{
// 5.3.4.1 - and the value of the @id of node equals rdf:nil, i.e., the detected list is empty, continue with the next usage item. The rdf:nil node cannot be converted to a list object as it would result in a list of lists, which isn't supported.
if (RdfSpecsHelper.RdfListNil.Equals(node["@id"].Value<string>()))
{
continue;
}
// 5.3.4.2 - Otherwise, the list consists of at least one item. We preserve the head node and transform the rest of the linked list to a list object.
// 5.3.4.3 - Set head id to the value of the @id member of head.
var headId = head["@id"].Value<string>();
// 5.3.4.4 - Set head to the value of the head id member of graph object so that all it's properties can be accessed.
head = graphObject[headId];
// 5.3.4.5 - Then, set head to the only item in the value of the rdf:rest member of head.
head = (head[RdfSpecsHelper.RdfListRest] as JArray)[0] as JObject;
// 5.3.4.6 - Finally, remove the last item of the list array and the last item of the list nodes array.
list.RemoveAt(list.Count - 1);
listNodes.RemoveAt(listNodes.Count - 1);
}
// 5.3.5 - Remove the @id member from head.
head.Remove("@id");
// 5.3.6 - Reverse the order of the list array.
list = new JArray(list.Reverse());
// 5.3.7 - Add an @list member to head and initialize its value to the list array.
head["@list"] = list;
// 5.3.8 - For each item node id in list nodes, remove the node id member from graph object.
foreach (var nodeId in listNodes)
{
graphObject.Remove(nodeId.Value<string>());
}
}
}
}
// 6 - Initialize an empty array result.
var result = new JArray();
// 7 - For each subject and node in default graph ordered by subject:
foreach (var dgp in defaultGraph.OrderBy(p => p.Key))
{
var subject = dgp.Key;
var node = dgp.Value as JObject;
// 7.1 - If graph map has a subject member:
if (graphMap.ContainsKey(subject))
{
// 7.1.1 - Add an @graph member to node and initialize its value to an empty array.
var graphArray = new JArray();
node["@graph"] = graphArray;
// 7.2.2 - For each key-value pair s-n in the subject member of graph map ordered by s, append n to the @graph member of node after removing its usages member, unless the only remaining member of n is @id.
foreach (var sp in graphMap[subject].OrderBy(sp => sp.Key))
{
var n = sp.Value as JObject;
n.Remove("usages");
if (n.Properties().Any(np => !np.Name.Equals("@id")))
{
graphArray.Add(n);
}
}
}
// 7.2 - Append node to result after removing its usages member, unless the only remaining member of node is @id.
node.Remove("usages");
if (node.Properties().Any(p => !p.Name.Equals("@id")))
{
result.Add(node);
}
}
// 8 - Return result.
return result;
}
private static bool IsWellFormedListNode(JObjectWithUsages node, string property, Dictionary<string, JArray> nodeUsagesMap)
{
// While property equals rdf:rest, the array value of the member of node usages map associated with the
// @id member of node has only one member, the value associated to the usages member of node has exactly 1 entry,
// node has a rdf:first and rdf:rest property, both of which have as value an array consisting of a single element,
// and node has no other members apart from an optional @type member whose value is an array with a single item equal
// to rdf:List, node represents a well-formed list node.
if (!RdfSpecsHelper.RdfListRest.Equals(property)) return false;
var nodeId = node["@id"].Value<string>();
if (nodeId == null) return false;
// Not mentioned in spec, but if node is not a blank node we should not merge it into a list array
if (!JsonLdProcessor.IsBlankNodeIdentifier(nodeId)) return false;
var mapEntry = nodeUsagesMap[nodeId] as JArray;
if (mapEntry == null || mapEntry.Count != 1) return false;
if (node.Usages.Count != 1) return false;
var first = node[RdfSpecsHelper.RdfListFirst] as JArray;
var rest = node[RdfSpecsHelper.RdfListRest] as JArray;
if (first == null || rest == null) return false;
if (first.Count != 1 || rest.Count != 1) return false;
var type = node["@type"] as JArray;
if (type != null && (type.Count != 1 ||
type.Count == 1 && !type[0].Value<string>().Equals(RdfSpecsHelper.RdfList)))
return false;
var propCount = node.Properties().Count();
if (type == null && propCount != 3 || type != null && propCount != 4) return false;
return true;
}
private JToken RdfToObject(INode value)
{
// 1 - If value is an IRI or a blank node identifier, return a new dictionary consisting of a single member @id whose value is set to value.
if (value is IUriNode uriNode)
{
return new JObject(new JProperty("@id", uriNode.Uri.OriginalString));
}
if (value is IBlankNode bNode)
{
return new JObject(new JProperty("@id", "_:" + bNode.InternalID));
}
// 2 - Otherwise value is an RDF literal:
var literal = value as ILiteralNode;
// 2.1 - Initialize a new empty dictionary result.
var result = new JObject();
// 2.2 - Initialize converted value to value.
JToken convertedValue = new JValue(literal.Value);
// 2.3 - Initialize type to null
string type = null;
// 2.4 - If use native types is true
if (_options.UseNativeTypes && literal.DataType != null)
{
// 2.4.1 - If the datatype IRI of value equals xsd:string, set converted value to the lexical form of value.
if (literal.DataType.ToString().Equals(XmlSpecsHelper.XmlSchemaDataTypeString))
{
convertedValue = new JValue(literal.Value);
}
// 2.4.2 - Otherwise, if the datatype IRI of value equals xsd:boolean, set converted value to true if the lexical form of value matches true, or false if it matches false. If it matches neither, set type to xsd:boolean.
else if (literal.DataType.ToString()
.Equals(XmlSpecsHelper.XmlSchemaDataTypeBoolean))
{
if (literal.Value.Equals("true"))
{
convertedValue = new JValue(true);
} else if (literal.Value.Equals("false"))
{
convertedValue = new JValue(false);
}
else
{
type = XmlSpecsHelper.XmlSchemaDataTypeBoolean;
}
}
// 2.4.3 - Otherwise, if the datatype IRI of value equals xsd:integer or xsd:double and its lexical form is a valid xsd:integer or xsd:double according [XMLSCHEMA11-2], set converted value to the result of converting the lexical form to a JSON number.
else if (literal.DataType.ToString().Equals(XmlSpecsHelper.XmlSchemaDataTypeInteger))
{
if (IsWellFormedInteger(literal.Value))
{
convertedValue = new JValue(long.Parse(literal.Value));
}
}
else if (literal.DataType.ToString().Equals(XmlSpecsHelper.XmlSchemaDataTypeDouble))
{
if (IsWellFormedDouble(literal.Value))
{
convertedValue = new JValue(double.Parse(literal.Value));
}
}
// KA: Step missing from spec - otherwise set type to the datatype IRI
else
{
type = literal.DataType.ToString();
}
}
// 2.5 - Otherwise, if value is a language-tagged string add a member @language to result and set its value to the language tag of value.
else if (!String.IsNullOrEmpty(literal.Language))
{
result["@language"] = literal.Language;
}
// 2.6 - Otherwise, set type to the datatype IRI of value, unless it equals xsd:string which is ignored.
else
{
if (literal.DataType != null && !literal.DataType.ToString()
.Equals(XmlSpecsHelper.XmlSchemaDataTypeString))
{
type = literal.DataType.ToString();
}
}
// 2.7 - Add a member @value to result whose value is set to converted value.
result["@value"] = convertedValue;
// 2.8 - If type is not null, add a member @type to result whose value is set to type.
if (type != null) result["@type"] = type;
// 2.9 - Return result.
return result;
}
private static readonly Regex IntegerLexicalRepresentation = new Regex(@"^(\+|\-)?\d+$");
private static bool IsWellFormedInteger(string literal)
{
return IntegerLexicalRepresentation.IsMatch(literal);
}
private static readonly Regex DoubleLexicalRepresentation = new Regex(@"^((\+|-)?([0-9]+(\.[0-9]*)?|\.[0-9]+)([Ee](\+|-)?[0-9]+)?|(\+|-)?INF|NaN)$");
private static bool IsWellFormedDouble(string literal)
{
return DoubleLexicalRepresentation.IsMatch(literal);
}
private static void AppendUniqueElement(JToken element, JArray toArray)
{
if (!toArray.Any(x => JToken.DeepEquals(x, element)))
{
toArray.Add(element);
}
}
private static string MakeNodeString(INode node)
{
var uriNode = node as IUriNode;
if (uriNode != null)
{
return uriNode.Uri.OriginalString;
}
var blankNode = node as IBlankNode;
if (blankNode != null)
{
return "_:" + blankNode.InternalID;
}
throw new ArgumentException("Node must be a blank node or URI node", nameof(node));
}
/// <inheritdoc/>
public event StoreWriterWarning Warning;
        /// <summary>
        /// A <see cref="JObject"/> extended with the list of places in which the node it
        /// represents is referenced, as recorded in step 4.5.8 of the "Convert from RDF"
        /// algorithm and consumed in step 5 to reconstruct rdf:List chains.
        /// </summary>
        private class JObjectWithUsages : JObject
        {
            // Every (node, predicate, value) reference to this node found while scanning the triples.
            public readonly List<Usage> Usages = new List<Usage>();
            public JObjectWithUsages(params object[] content) : base(content) { }
        }
        /// <summary>
        /// Records a single reference to a node: the node containing the reference, the
        /// predicate through which it was made, and the value object created for it.
        /// </summary>
        private class Usage
        {
            public Usage(JObjectWithUsages node, string property, JToken value)
            {
                Node = node;
                Property = property;
                Value = value;
            }
            // The node containing the reference.
            public JObjectWithUsages Node { get; }
            // The predicate IRI through which the reference was made.
            public string Property { get; }
            // The value object generated for the referenced node.
            public JToken Value { get; }
        }
}
}
| 51.526946 | 274 | 0.534883 | [
"MIT"
] | TaviTruman/dotnetrdf | Libraries/dotNetRDF/Writing/JsonLdWriter.cs | 25,817 | C# |
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the inspector-2016-02-16.normal.json service model.
*/
using System;
using Amazon.Runtime;
using Amazon.Util.Internal;
namespace Amazon.Inspector
{
/// <summary>
/// Configuration for accessing Amazon Inspector service
/// </summary>
    /// <summary>
    /// Configuration for accessing Amazon Inspector service
    /// </summary>
    /// <remarks>
    /// NOTE: this class is generated from the inspector-2016-02-16.normal.json service model
    /// (see the file header); regenerate rather than hand-editing.
    /// </remarks>
    public partial class AmazonInspectorConfig : ClientConfig
    {
        // User-Agent header value stamped with the SDK version; computed once per process.
        private static readonly string UserAgentString =
            InternalSDKUtils.BuildUserAgentString("3.5.0.67");

        private string _userAgent = UserAgentString;

        /// <summary>
        /// Default constructor
        /// </summary>
        public AmazonInspectorConfig()
        {
            // Service name used when signing requests (SigV4 credential scope).
            this.AuthenticationServiceName = "inspector";
        }

        /// <summary>
        /// The constant used to lookup in the region hash the endpoint.
        /// </summary>
        public override string RegionEndpointServiceName
        {
            get
            {
                return "inspector";
            }
        }

        /// <summary>
        /// Gets the ServiceVersion property.
        /// </summary>
        public override string ServiceVersion
        {
            get
            {
                return "2016-02-16";
            }
        }

        /// <summary>
        /// Gets the value of UserAgent property.
        /// </summary>
        public override string UserAgent
        {
            get
            {
                return _userAgent;
            }
        }
    }
} | 26.0625 | 107 | 0.58705 | [
"Apache-2.0"
] | motoko89/aws-sdk-net-xamarin | sdk/src/Services/Inspector/Generated/AmazonInspectorConfig.cs | 2,085 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Drawing;
using System.Timers;
using MonoMac.Foundation;
using MonoMac.AppKit;
using MonoMac.CoreAnimation;
using MonoMac.CoreGraphics;
namespace GlossyClock
{
	/// <summary>
	/// A layer-backed NSView that renders a glossy digital clock: a gradient background,
	/// a centered text layer bound to a ClockTimer, a rounded white border, and a
	/// semi-transparent gloss image on top.
	/// </summary>
	public partial class ClockView : MonoMac.AppKit.NSView
	{
		// Root gradient layer; all other layers are added as its sublayers.
		CALayer backgroundLayer;
		// Text layer showing the time; its "string" is data-bound to clockTimer.outputString.
		CATextLayer clockFaceLayer;
		// Source of the displayed time string (project type; see binding in SetupClockFaceLayer).
		ClockTimer clockTimer;

		// Constructor invoked when the view is created from unmanaged code.
		public ClockView (IntPtr handle) : base(handle)
		{
			Initialize ();
		}

		// Constructor invoked when the view is deserialized from a nib/xib.
		[Export("initWithCoder:")]
		public ClockView (NSCoder coder) : base(coder)
		{
			Initialize ();
		}

		// Shared initialization: create the timer and install the layer tree.
		void Initialize ()
		{
			clockTimer = new ClockTimer ();
			Layer = SetupLayers();
		}

		public override void AwakeFromNib ()
		{
			// Opt this view into Core Animation layer backing.
			WantsLayer = true;
		}

		// Builds the full layer hierarchy: background + (clock face, border, gloss).
		private CALayer SetupLayers()
		{
			backgroundLayer = SetupBackgroundLayer ();
			backgroundLayer.AddSublayer (SetupClockFaceLayer ());
			backgroundLayer.AddSublayer (SetupBorderLayer ());
			backgroundLayer.AddSublayer (SetupGlossyLayer ());
			return backgroundLayer;
		}

		// Creates the rounded blue vertical-gradient background with a constraint layout manager
		// (used by the clock face layer's centering constraints).
		CALayer SetupBackgroundLayer()
		{
			backgroundLayer = new CAGradientLayer ();

			CGColor gradColor1 = new CGColor (13.0f / 255.0f, 116.0f / 255.0f, 1.0f,1.0f);
			CGColor gradColor2 = new CGColor (0.0f, 53.0f / 255.0f, 126.0f / 255.0f,1.0f);

			((CAGradientLayer)backgroundLayer).Colors = new CGColor[2] { gradColor1, gradColor2 };
			backgroundLayer.CornerRadius = 12.0f;

			CAConstraintLayoutManager layout = CAConstraintLayoutManager.LayoutManager;
			backgroundLayer.LayoutManager = layout;

			return backgroundLayer;
		}

		// Creates the rounded white border inset 8pt from the view's frame.
		CALayer SetupBorderLayer()
		{
			CALayer borderLayer = CALayer.Create();
			RectangleF borderRect = Frame.Inset (8, 8);

			borderLayer.CornerRadius = 12;
			borderLayer.BorderColor = new CGColor (1, 1, 1, 1);
			borderLayer.BorderWidth = 2;
			borderLayer.Frame = borderRect;

			return borderLayer;
		}

		// Creates the time text layer, binds its contents to the timer, and centers it
		// in the superlayer via CAConstraints.
		CALayer SetupClockFaceLayer()
		{
			clockFaceLayer = new CATextLayer (){
				FontSize = 60,
				ShadowOpacity = .9f
			};
			// Keep the displayed string in sync with the timer's outputString via KVC binding.
			clockFaceLayer.Bind ("string", clockTimer, "outputString", null);
			clockFaceLayer.SetFont ("Menlo");

			// Constrain the text layer in the middle
			var constraint = CAConstraint.Create (CAConstraintAttribute.MidX, "superlayer", CAConstraintAttribute.MidX);
			clockFaceLayer.AddConstraint (constraint);

			constraint = CAConstraint.Create (CAConstraintAttribute.MidY, "superlayer", CAConstraintAttribute.MidY);
			clockFaceLayer.AddConstraint (constraint);

			return clockFaceLayer;
		}

		// Creates the semi-transparent gloss overlay from the bundled clock-gloss.png.
		CALayer SetupGlossyLayer()
		{
			// Create the CGImage by proxying it through an NSImage
			string filePath = NSBundle.MainBundle.PathForResource("clock-gloss","png");
			var glossyImage = new NSImage (filePath).AsCGImage (RectangleF.Empty, null, null);

			CALayer glossLayer = new CALayer() {
				Opacity = 0.8f,
				CornerRadius = 12,
				MasksToBounds = true,
				Frame = this.Frame,
				Contents = glossyImage
			};

			return glossLayer;
		}
	}
}
| 24.586777 | 111 | 0.70521 | [
"MIT"
] | kangaroo/monomac | samples/GlossyClock/ClockView.cs | 2,975 | C# |
namespace Machete.X12Schema.V5010
{
using X12;
    /// <summary>
    /// Layout contract for the X12 5010 transaction set 304 (Shipping Instructions),
    /// including its enclosing GS/GE functional group envelope. Property names follow
    /// the X12 segment ids and their standard descriptions.
    /// </summary>
    public interface T304 :
        X12Layout
    {
        // Functional group envelope (opened here, closed by FunctionalGroupTrailer below).
        Segment<GS> FunctionalGroupHeader { get; }

        // Transaction set envelope and leading header segments.
        Segment<ST> TransactionSetHeader { get; }
        Segment<B2> BeginningShipmentInformationTransaction { get; }
        Segment<B2A> SetPurpose { get; }
        SegmentList<Y6> Authentication { get; }
        Segment<B1> BeginningSegmentForBookingOrPickupDelivery { get; }
        SegmentList<G1> ShipmentTypeInformation { get; }
        SegmentList<G2> BeyondRouting { get; }
        SegmentList<G3> CompensationInformation { get; }
        SegmentList<N9> ExtendedReferenceInformation { get; }
        SegmentList<YNQ> YesOrNoQuestion { get; }
        SegmentList<V1> VesselIdentification { get; }
        Segment<Y3> SpaceConfirmation { get; }
        Segment<M0> LetterOfCreditReference { get; }
        Segment<CUR> Currency { get; }
        LayoutList<LoopM1_304> LoopM1 { get; }
        Segment<M2> SalesOrDeliveryTerms { get; }
        Segment<C2> BankId { get; }
        Segment<ITD> TermsOfSale { get; }
        SegmentList<DTM> DateOrTimeReference { get; }

        // Party identification (N1 loops) and port/location (R4 loops).
        LayoutList<LoopN1_304> LoopN1 { get; }
        LayoutList<LoopR4_304> LoopR4 { get; }

        // Routing, remarks and shipment-level instructions.
        SegmentList<R2A> RouteInformationWithPreference { get; }
        SegmentList<R2> RouteInformation { get; }
        SegmentList<K1> Remarks { get; }
        SegmentList<L11> BusinessInstructionsAndReferenceNumber { get; }
        SegmentList<H3> SpecialHandlingInstructions { get; }
        SegmentList<L5> DescriptionMarksAndNumbers { get; }
        SegmentList<X1> ExportLicense { get; }
        SegmentList<X2> ImportLicense { get; }
        LayoutList<LoopC8_304> LoopC8 { get; }

        // Detail (LX) loops and total (L3) loops.
        LayoutList<LoopLX_304> LoopLX { get; }
        LayoutList<LoopL3_304> LoopL3 { get; }

        // Trailing envelope segments.
        Segment<SE> TransactionSetTrailer { get; }
        Segment<GE> FunctionalGroupTrailer { get; }
    }
} | 27.873418 | 72 | 0.564941 | [
"Apache-2.0"
] | ahives/Machete | src/Machete.X12Schema/V5010/Layouts/T304.cs | 2,202 | C# |
/* MapleLib - A general-purpose MapleStory library
* Copyright (C) 2009, 2010, 2015 Snow and haha01haha01
* This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
* This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.*/
using System;
using System.Net;
using System.Net.Sockets;
using MapleLib.MapleCryptoLib;
namespace MapleLib.PacketLib
{
    /// <summary>
    /// Passively captures IP traffic on a raw socket (receive-all mode) and raises an
    /// event with the raw bytes of each datagram received. Unlike a normal session,
    /// no decryption is applied before forwarding (see note in OnDataReceived).
    /// </summary>
    public class Monitor
    {
        /// <summary>
        /// The Monitor socket
        /// </summary>
        private readonly Socket _socket;

        /// <summary>
        /// The Received packet crypto manager
        /// </summary>
        private MapleCrypto _RIV;

        /// <summary>
        /// The Sent packet crypto manager
        /// </summary>
        private MapleCrypto _SIV;

        /// <summary>
        /// Method to handle packets received
        /// </summary>
        public delegate void PacketReceivedHandler(PacketReader packet);

        /// <summary>
        /// Packet received event
        /// </summary>
        public event PacketReceivedHandler OnPacketReceived;

        /// <summary>
        /// Method to handle client disconnected
        /// </summary>
        public delegate void ClientDisconnectedHandler(Monitor monitor);

        /// <summary>
        /// Client disconnected event
        /// </summary>
        public event ClientDisconnectedHandler OnClientDisconnected;

        /// <summary>
        /// The Received packet crypto manager
        /// </summary>
        public MapleCrypto RIV
        {
            get { return _RIV; }
            set { _RIV = value; }
        }

        /// <summary>
        /// The Sent packet crypto manager
        /// </summary>
        public MapleCrypto SIV
        {
            get { return _SIV; }
            set { _SIV = value; }
        }

        /// <summary>
        /// The Monitor's socket
        /// </summary>
        public Socket Socket
        {
            get { return _socket; }
        }

        /// <summary>
        /// Creates a new instance of Monitor backed by a raw IP socket.
        /// </summary>
        public Monitor()
        {
            _socket = new Socket(AddressFamily.InterNetwork, SocketType.Raw, ProtocolType.IP);
        }

        /// <summary>
        /// Binds the raw socket to the given local address, switches it into
        /// receive-all mode so every IP datagram on that interface is delivered
        /// (requires elevated privileges on Windows), and starts the receive loop.
        /// </summary>
        /// <param name="IP">Local interface address to capture on.</param>
        public void StartMonitoring(IPAddress IP)
        {
            _socket.Bind(new IPEndPoint(IP, 0));
            // Ask the stack to include IP headers in the received data.
            _socket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.HeaderIncluded, true);
            // SIO_RCVALL control code: byIn = 1 enables capture of all traffic on the interface.
            byte[] byIn = new byte[4] { 1, 0, 0, 0 };
            byte[] byOut = null;
            _socket.IOControl(IOControlCode.ReceiveAll, byIn, byOut);
            WaitForData();
        }

        /// <summary>
        /// Waits for more data to arrive
        /// </summary>
        public void WaitForData()
        {
            // A fresh SocketInfo (and therefore a fresh buffer) is used per receive cycle.
            WaitForData(new SocketInfo(_socket, short.MaxValue));
        }

        /// <summary>
        /// Waits for more data to arrive
        /// </summary>
        /// <param name="socketInfo">Info about data to be received</param>
        private void WaitForData(SocketInfo socketInfo)
        {
            try
            {
                // Begin an async receive into the remaining space of the buffer;
                // OnDataReceived is invoked on completion with socketInfo as state.
                _socket.BeginReceive(socketInfo.DataBuffer,
                    socketInfo.Index,
                    socketInfo.DataBuffer.Length - socketInfo.Index,
                    SocketFlags.None,
                    new AsyncCallback(OnDataReceived),
                    socketInfo);
            }
            catch (Exception se)
            {
                Helpers.ErrorLogger.Log(Helpers.ErrorLevel.Critical, "[Error] Session.WaitForData: " + se);
            }
        }

        // Completion callback: forwards the received bytes to OnPacketReceived and re-arms
        // the receive loop. A zero-byte read is treated as disconnection.
        private void OnDataReceived(IAsyncResult iar)
        {
            SocketInfo socketInfo = (SocketInfo)iar.AsyncState;
            try
            {
                int received = socketInfo.Socket.EndReceive(iar);
                if (received == 0)
                {
                    if (OnClientDisconnected != null)
                    {
                        OnClientDisconnected(this);
                    }
                    return;
                }

                // NOTE(review): Index is advanced here, but the copy below always starts at
                // offset 0 and WaitForData() re-arms with a brand-new SocketInfo, so the
                // accumulated Index is never used — confirm whether partial reads can occur.
                socketInfo.Index += received;

                // Copy exactly the received bytes and hand them to subscribers as-is
                // (no header parsing or MapleCrypto decryption is applied here).
                byte[] dataa = new byte[received];
                Buffer.BlockCopy(socketInfo.DataBuffer, 0, dataa, 0, received);
                if (OnPacketReceived != null)
                    OnPacketReceived.Invoke(new PacketReader(dataa));

                WaitForData();
                // (Legacy header/decrypt handling that validated the packet header with _RIV and
                // decrypted the payload via MapleCustomEncryption was removed from this path.)
            }
            catch (ObjectDisposedException)
            {
                Helpers.ErrorLogger.Log(Helpers.ErrorLevel.Critical, "[Error] Session.OnDataReceived: Socket has been closed");
            }
            catch (SocketException se)
            {
                // 10054 = WSAECONNRESET (peer reset), expected during capture; everything else is logged.
                if (se.ErrorCode != 10054)
                {
                    Helpers.ErrorLogger.Log(Helpers.ErrorLevel.Critical, "[Error] Session.OnDataReceived: " + se);
                }
            }
            catch (Exception e)
            {
                Helpers.ErrorLogger.Log(Helpers.ErrorLevel.Critical, "[Error] Session.OnDataReceived: " + e);
            }
        }
    }
} | 28.946188 | 129 | 0.665066 | [
"MPL-2.0",
"MPL-2.0-no-copyleft-exception"
] | 369MJ/Harepacker-resurrected | MapleLib/PacketLib/Monitor.cs | 6,457 | C# |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Internal;
using Microsoft.AspNetCore.Mvc.Abstractions;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.AspNetCore.Testing;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Primitives;
using Xunit;
namespace Microsoft.AspNetCore.Mvc.IntegrationTests
{
// Integration tests targeting the behavior of the MutableObjectModelBinder and related classes
// with other model binders.
public class ComplexTypeModelBinderIntegrationTest
{
private const string AddressBodyContent = "{ \"street\" : \"" + AddressStreetContent + "\" }";
private const string AddressStreetContent = "1 Microsoft Way";
private static readonly byte[] ByteArrayContent = Encoding.BigEndianUnicode.GetBytes("abcd");
private static readonly string ByteArrayEncoded = Convert.ToBase64String(ByteArrayContent);
        // Top-level model bound by the complex type binder in the tests below.
        private class Order1
        {
            public int ProductId { get; set; }
            public Person1 Customer { get; set; }
        }

        // Nested POCO; Name comes from value providers while Address comes from the body.
        private class Person1
        {
            public string Name { get; set; }
            // [FromBody] forces the body model binder (JSON input formatter) for this property.
            [FromBody]
            public Address1 Address { get; set; }
        }

        private class Address1
        {
            public string Street { get; set; }
        }
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithBodyModelBinder_WithPrefix_Success()
{
// Arrange
var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
var parameter = new ParameterDescriptor()
{
Name = "parameter",
ParameterType = typeof(Order1)
};
// Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
var testContext = ModelBindingTestHelper.GetTestContext(request =>
{
request.QueryString = new QueryString("?parameter.Customer.Name=bill");
SetJsonBodyContent(request, AddressBodyContent);
});
var modelState = testContext.ModelState;
var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
// Act
var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
// Assert
Assert.True(modelBindingResult.IsModelSet);
var model = Assert.IsType<Order1>(modelBindingResult.Model);
Assert.NotNull(model.Customer);
Assert.Equal("bill", model.Customer.Name);
Assert.NotNull(model.Customer.Address);
Assert.Equal(AddressStreetContent, model.Customer.Address.Street);
Assert.Single(modelState);
Assert.Equal(0, modelState.ErrorCount);
Assert.True(modelState.IsValid);
var entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Name").Value;
Assert.Equal("bill", entry.AttemptedValue);
Assert.Equal("bill", entry.RawValue);
}
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithBodyModelBinder_WithEmptyPrefix_Success()
{
// Arrange
var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
var parameter = new ParameterDescriptor()
{
Name = "parameter",
ParameterType = typeof(Order1)
};
// Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
var testContext = ModelBindingTestHelper.GetTestContext(request =>
{
request.QueryString = new QueryString("?Customer.Name=bill");
SetJsonBodyContent(request, AddressBodyContent);
});
var modelState = testContext.ModelState;
var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
// Act
var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
// Assert
Assert.True(modelBindingResult.IsModelSet);
var model = Assert.IsType<Order1>(modelBindingResult.Model);
Assert.NotNull(model.Customer);
Assert.Equal("bill", model.Customer.Name);
Assert.NotNull(model.Customer.Address);
Assert.Equal(AddressStreetContent, model.Customer.Address.Street);
Assert.Single(modelState);
Assert.Equal(0, modelState.ErrorCount);
Assert.True(modelState.IsValid);
var entry = Assert.Single(modelState, e => e.Key == "Customer.Name").Value;
Assert.Equal("bill", entry.AttemptedValue);
Assert.Equal("bill", entry.RawValue);
}
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithBodyModelBinder_WithPrefix_NoBodyData()
{
// Arrange
var parameter = new ParameterDescriptor()
{
Name = "parameter",
ParameterType = typeof(Order1)
};
// Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
var testContext = ModelBindingTestHelper.GetTestContext(request =>
{
request.QueryString = new QueryString("?parameter.Customer.Name=bill");
request.ContentType = "application/json";
});
var optionsAccessor = testContext.GetService<IOptions<MvcOptions>>();
optionsAccessor.Value.AllowEmptyInputInBodyModelBinding = true;
var modelState = testContext.ModelState;
var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
var parameterBinder = ModelBindingTestHelper.GetParameterBinder(optionsAccessor.Value);
// Act
var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
// Assert
Assert.True(modelBindingResult.IsModelSet);
var model = Assert.IsType<Order1>(modelBindingResult.Model);
Assert.NotNull(model.Customer);
Assert.Equal("bill", model.Customer.Name);
Assert.Null(model.Customer.Address);
Assert.Single(modelState);
Assert.Equal(0, modelState.ErrorCount);
Assert.True(modelState.IsValid);
var entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Name").Value;
Assert.Equal("bill", entry.AttemptedValue);
Assert.Equal("bill", entry.RawValue);
}
// We don't provide enough data in this test for the 'Person' model to be created. So even though there is
// body data in the request, it won't be used.
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithBodyModelBinder_WithPrefix_PartialData()
{
// Arrange
var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
var parameter = new ParameterDescriptor()
{
Name = "parameter",
ParameterType = typeof(Order1)
};
// Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
var testContext = ModelBindingTestHelper.GetTestContext(request =>
{
request.QueryString = new QueryString("?parameter.ProductId=10");
SetJsonBodyContent(request, AddressBodyContent);
});
var modelState = testContext.ModelState;
var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
// Act
var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
// Assert
Assert.True(modelBindingResult.IsModelSet);
var model = Assert.IsType<Order1>(modelBindingResult.Model);
Assert.Null(model.Customer);
Assert.Equal(10, model.ProductId);
Assert.Single(modelState);
Assert.Equal(0, modelState.ErrorCount);
Assert.True(modelState.IsValid);
var entry = Assert.Single(modelState, e => e.Key == "parameter.ProductId").Value;
Assert.Equal("10", entry.AttemptedValue);
Assert.Equal("10", entry.RawValue);
}
// We don't provide enough data in this test for the 'Person' model to be created. So even though there is
// body data in the request, it won't be used.
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithBodyModelBinder_WithPrefix_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order1)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
        SetJsonBodyContent(request, AddressBodyContent);
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    // The top-level model is still created (IsModelSet), but with no bound members
    // and an empty, valid model state.
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order1>(modelBindingResult.Model);
    Assert.Null(model.Customer);

    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// Order whose nested Person3.Token is a byte[]; exercises ByteArrayModelBinder.
private class Order3
{
    public int ProductId { get; set; }
    public Person3 Customer { get; set; }
}
// Person with a byte[] property, bound from a base64-encoded value (ByteArrayEncoded).
private class Person3
{
    public string Name { get; set; }
    public byte[] Token { get; set; }
}
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithByteArrayModelBinder_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order3)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        // Fixed: the query-string pair separator had been mangled into a '¶' character
        // ("bill¶meter.") — it must be "&parameter." for the Token value to bind.
        request.QueryString =
            new QueryString("?parameter.Customer.Name=bill&parameter.Customer.Token=" + ByteArrayEncoded);
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order3>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal("bill", model.Customer.Name);
    // The base64 query value is decoded into the byte[] property.
    Assert.Equal(ByteArrayContent, model.Customer.Token);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Token").Value;
    Assert.Equal(ByteArrayEncoded, entry.AttemptedValue);
    Assert.Equal(ByteArrayEncoded, entry.RawValue);
}
[Fact]
// Same as the WithPrefix byte-array test, but binding falls back to the empty
// prefix ("Customer.Name" / "Customer.Token" keys with no "parameter." prefix).
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithByteArrayModelBinder_WithEmptyPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order3)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Customer.Name=bill&Customer.Token=" + ByteArrayEncoded);
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order3>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal("bill", model.Customer.Name);
    Assert.Equal(ByteArrayContent, model.Customer.Token);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "Customer.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "Customer.Token").Value;
    Assert.Equal(ByteArrayEncoded, entry.AttemptedValue);
    Assert.Equal(ByteArrayEncoded, entry.RawValue);
}
[Fact]
// No data for the byte[] member: Customer is still created (Name binds), but
// Token stays null and no model-state entry is recorded for it.
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithByteArrayModelBinder_WithPrefix_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order3)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Customer.Name=bill");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order3>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal("bill", model.Customer.Name);
    Assert.Null(model.Customer.Token);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);
}
// Order whose nested Person4.Documents is a form-file collection; exercises FormFileModelBinder.
private class Order4
{
    public int ProductId { get; set; }
    public Person4 Customer { get; set; }
}
// Person with an IEnumerable<IFormFile> property, bound from multipart form-file content.
private class Person4
{
    public string Name { get; set; }
    public IEnumerable<IFormFile> Documents { get; set; }
}
[Fact]
// Name binds from the query string; Documents binds from a form file posted
// under the fully-prefixed key "parameter.Customer.Documents".
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithFormFileModelBinder_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order4)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Customer.Name=bill");
        SetFormFileBodyContent(request, "Hello, World!", "parameter.Customer.Documents");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order4>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal("bill", model.Customer.Name);
    Assert.Single(model.Customer.Documents);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.Customer.Documents").Value;
    Assert.Null(entry.AttemptedValue); // FormFile entries for body don't include original text.
    Assert.Null(entry.RawValue);
}
[Fact]
// Same form-file scenario as above, but using the empty-prefix fallback keys
// ("Customer.Name" / "Customer.Documents").
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithFormFileModelBinder_WithEmptyPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order4)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Customer.Name=bill");
        SetFormFileBodyContent(request, "Hello, World!", "Customer.Documents");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order4>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal("bill", model.Customer.Name);
    Assert.Single(model.Customer.Documents);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "Customer.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "Customer.Documents").Value;
    Assert.Null(entry.AttemptedValue); // FormFile entries don't include the model.
    Assert.Null(entry.RawValue);
}
[Fact]
// No form data at all: Customer is still created from the query-string Name,
// but Documents stays null and gets no model-state entry.
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithFormFileModelBinder_WithPrefix_NoBodyData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order4)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Customer.Name=bill");

        // Deliberately leaving out any form data.
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order4>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal("bill", model.Customer.Name);
    Assert.Null(model.Customer.Documents);

    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var kvp = Assert.Single(modelState);
    Assert.Equal("parameter.Customer.Name", kvp.Key);
    var entry = kvp.Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);
}
// We don't provide enough data in this test for the 'Person' model to be created. So even though there are
// form files in the request, it won't be used.
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithFormFileModelBinder_WithPrefix_PartialData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order4)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.ProductId=10");
        SetFormFileBodyContent(request, "Hello, World!", "parameter.Customer.Documents");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order4>(modelBindingResult.Model);
    Assert.Null(model.Customer);
    Assert.Equal(10, model.ProductId);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.ProductId").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);
}
// We don't provide enough data in this test for the 'Person' model to be created. So even though there is
// body data in the request, it won't be used.
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithFormFileModelBinder_WithPrefix_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order4)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
        SetFormFileBodyContent(request, "Hello, World!", "parameter.Customer.Documents");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order4>(modelBindingResult.Model);
    Assert.Null(model.Customer);

    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// Order with an int[] property; exercises array binding ("ProductIds[0]=...").
private class Order5
{
    public string Name { get; set; }
    public int[] ProductIds { get; set; }
}
[Fact]
public async Task MutableObjectModelBinder_BindsArrayProperty_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order5)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        // Fixed: the '&' separators had been mangled into '¶' characters
        // ("bill¶meter." / "10¶meter.") — they must be "&parameter." so all
        // three query pairs are parsed and the array elements bind.
        request.QueryString =
            new QueryString("?parameter.Name=bill&parameter.ProductIds[0]=10&parameter.ProductIds[1]=11");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order5>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new int[] { 10, 11 }, model.ProductIds);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductIds[0]").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductIds[1]").Value;
    Assert.Equal("11", entry.AttemptedValue);
    Assert.Equal("11", entry.RawValue);
}
[Fact]
// Array binding via the empty-prefix fallback ("Name" / "ProductIds[i]" keys).
public async Task MutableObjectModelBinder_BindsArrayProperty_EmptyPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order5)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Name=bill&ProductIds[0]=10&ProductIds[1]=11");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order5>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new int[] { 10, 11 }, model.ProductIds);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "ProductIds[0]").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "ProductIds[1]").Value;
    Assert.Equal("11", entry.AttemptedValue);
    Assert.Equal("11", entry.RawValue);
}
[Fact]
// No indexed data for the array: Name binds, ProductIds stays null.
public async Task MutableObjectModelBinder_BindsArrayProperty_NoCollectionData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order5)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Name=bill");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order5>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Null(model.ProductIds);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);
}
[Fact]
// Empty query string: the model is created with all members left null and
// model state remains empty and valid.
public async Task MutableObjectModelBinder_BindsArrayProperty_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order5)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order5>(modelBindingResult.Model);
    Assert.Null(model.Name);
    Assert.Null(model.ProductIds);

    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// Order with a List<int> property; exercises list binding ("ProductIds[i]=...").
private class Order6
{
    public string Name { get; set; }
    public List<int> ProductIds { get; set; }
}
[Fact]
public async Task MutableObjectModelBinder_BindsListProperty_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order6)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        // Fixed: the '&' separators had been mangled into '¶' characters
        // ("bill¶meter." / "10¶meter.") — they must be "&parameter." so all
        // three query pairs are parsed and the list elements bind.
        request.QueryString =
            new QueryString("?parameter.Name=bill&parameter.ProductIds[0]=10&parameter.ProductIds[1]=11");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order6>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new List<int>() { 10, 11 }, model.ProductIds);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductIds[0]").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductIds[1]").Value;
    Assert.Equal("11", entry.AttemptedValue);
    Assert.Equal("11", entry.RawValue);
}
[Fact]
// List binding via the empty-prefix fallback ("Name" / "ProductIds[i]" keys).
public async Task MutableObjectModelBinder_BindsListProperty_EmptyPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order6)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Name=bill&ProductIds[0]=10&ProductIds[1]=11");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order6>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new List<int>() { 10, 11 }, model.ProductIds);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "ProductIds[0]").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "ProductIds[1]").Value;
    Assert.Equal("11", entry.AttemptedValue);
    Assert.Equal("11", entry.RawValue);
}
[Fact]
// No indexed data for the list: Name binds, ProductIds stays null.
public async Task MutableObjectModelBinder_BindsListProperty_NoCollectionData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order6)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Name=bill");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order6>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Null(model.ProductIds);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);
}
[Fact]
// Empty query string: the model is created with all members left null and
// model state remains empty and valid.
public async Task MutableObjectModelBinder_BindsListProperty_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order6)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order6>(modelBindingResult.Model);
    Assert.Null(model.Name);
    Assert.Null(model.ProductIds);

    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// Order with a Dictionary<string, int> property; exercises dictionary binding
// via "ProductIds[i].Key" / "ProductIds[i].Value" pairs.
private class Order7
{
    public string Name { get; set; }
    public Dictionary<string, int> ProductIds { get; set; }
}
[Fact]
public async Task MutableObjectModelBinder_BindsDictionaryProperty_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order7)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        // Fixed: the '&' separators had been mangled into '¶' characters
        // ("bill¶meter." / "key0¶meter.") — they must be "&parameter." so all
        // three query pairs are parsed and the dictionary entry binds.
        request.QueryString =
            new QueryString("?parameter.Name=bill&parameter.ProductIds[0].Key=key0&parameter.ProductIds[0].Value=10");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order7>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new Dictionary<string, int>() { { "key0", 10 } }, model.ProductIds);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductIds[0].Key").Value;
    Assert.Equal("key0", entry.AttemptedValue);
    Assert.Equal("key0", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductIds[0].Value").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);
}
[Fact]
// Dictionary binding via the empty-prefix fallback
// ("Name" / "ProductIds[0].Key" / "ProductIds[0].Value" keys).
public async Task MutableObjectModelBinder_BindsDictionaryProperty_EmptyPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order7)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Name=bill&ProductIds[0].Key=key0&ProductIds[0].Value=10");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order7>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new Dictionary<string, int>() { { "key0", 10 } }, model.ProductIds);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "ProductIds[0].Key").Value;
    Assert.Equal("key0", entry.AttemptedValue);
    Assert.Equal("key0", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "ProductIds[0].Value").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);
}
[Fact]
// No Key/Value data for the dictionary: Name binds, ProductIds stays null.
public async Task MutableObjectModelBinder_BindsDictionaryProperty_NoCollectionData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order7)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Name=bill");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order7>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Null(model.ProductIds);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);
}
[Fact]
// Empty query string: the model is created with all members left null and
// model state remains empty and valid.
public async Task MutableObjectModelBinder_BindsDictionaryProperty_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "parameter",
        ParameterType = typeof(Order7)
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Order7>(modelBindingResult.Model);
    Assert.Null(model.Name);
    Assert.Null(model.ProductIds);

    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// Dictionary property with an IEnumerable<> value type.
private class Car1
{
    public string Name { get; set; }
    public Dictionary<string, IEnumerable<SpecDoc>> Specs { get; set; }
}
// Dictionary property with an Array value type.
private class Car2
{
    public string Name { get; set; }
    public Dictionary<string, SpecDoc[]> Specs { get; set; }
}
// IEnumerable of key/value pairs (not a Dictionary) with a complex value type.
private class Car3
{
    public string Name { get; set; }
    public IEnumerable<KeyValuePair<string, IEnumerable<SpecDoc>>> Specs { get; set; }
}
// Complex element type used as the dictionary/collection value in the Car* models.
private class SpecDoc
{
    public string Name { get; set; }
}
[Fact]
// Binds a Dictionary<string, IEnumerable<SpecDoc>> property from indexed
// "p.Specs[i].Key" / "p.Specs[i].Value[j].Name" query-string pairs.
public async Task MutableObjectModelBinder_BindsDictionaryProperty_WithIEnumerableComplexTypeValue_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor()
    {
        Name = "p",
        ParameterType = typeof(Car1)
    };

    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        // Two dictionary entries, each with a two-element complex value collection.
        var queryString = "?p.Name=Accord"
            + "&p.Specs[0].Key=camera_specs"
            + "&p.Specs[0].Value[0].Name=camera_spec1.txt"
            + "&p.Specs[0].Value[1].Name=camera_spec2.txt"
            + "&p.Specs[1].Key=tyre_specs"
            + "&p.Specs[1].Value[0].Name=tyre_spec1.txt"
            + "&p.Specs[1].Value[1].Name=tyre_spec2.txt";
        request.QueryString = new QueryString(queryString);
    });

    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);

    var model = Assert.IsType<Car1>(modelBindingResult.Model);
    Assert.Equal("Accord", model.Name);
    Assert.Collection(
        model.Specs,
        (e) =>
        {
            Assert.Equal("camera_specs", e.Key);
            Assert.Collection(
                e.Value,
                (s) =>
                {
                    Assert.Equal("camera_spec1.txt", s.Name);
                },
                (s) =>
                {
                    Assert.Equal("camera_spec2.txt", s.Name);
                });
        },
        (e) =>
        {
            Assert.Equal("tyre_specs", e.Key);
            Assert.Collection(
                e.Value,
                (s) =>
                {
                    Assert.Equal("tyre_spec1.txt", s.Name);
                },
                (s) =>
                {
                    Assert.Equal("tyre_spec2.txt", s.Name);
                });
        });

    // One model-state entry per query-string pair (7 total), all valid.
    Assert.Equal(7, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "p.Name").Value;
    Assert.Equal("Accord", entry.AttemptedValue);
    Assert.Equal("Accord", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "p.Specs[0].Key").Value;
    Assert.Equal("camera_specs", entry.AttemptedValue);
    Assert.Equal("camera_specs", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "p.Specs[0].Value[0].Name").Value;
    Assert.Equal("camera_spec1.txt", entry.AttemptedValue);
    Assert.Equal("camera_spec1.txt", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "p.Specs[0].Value[1].Name").Value;
    Assert.Equal("camera_spec2.txt", entry.AttemptedValue);
    Assert.Equal("camera_spec2.txt", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "p.Specs[1].Key").Value;
    Assert.Equal("tyre_specs", entry.AttemptedValue);
    Assert.Equal("tyre_specs", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "p.Specs[1].Value[0].Name").Value;
    Assert.Equal("tyre_spec1.txt", entry.AttemptedValue);
    Assert.Equal("tyre_spec1.txt", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "p.Specs[1].Value[1].Name").Value;
    Assert.Equal("tyre_spec2.txt", entry.AttemptedValue);
    Assert.Equal("tyre_spec2.txt", entry.RawValue);
}
// Binding a dictionary property whose values are arrays of complex types
// (Car2.Specs) from query-string data should populate both dictionary entries,
// each with its two elements, and leave a valid model state with one entry per
// bound value.
[Fact]
public async Task MutableObjectModelBinder_BindsDictionaryProperty_WithArrayOfComplexTypeValue_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "p",
        ParameterType = typeof(Car2),
    };

    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString(
            "?p.Name=Accord" +
            "&p.Specs[0].Key=camera_specs" +
            "&p.Specs[0].Value[0].Name=camera_spec1.txt" +
            "&p.Specs[0].Value[1].Name=camera_spec2.txt" +
            "&p.Specs[1].Key=tyre_specs" +
            "&p.Specs[1].Value[0].Name=tyre_spec1.txt" +
            "&p.Specs[1].Value[1].Name=tyre_spec2.txt");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Car2>(modelBindingResult.Model);
    Assert.Equal("Accord", model.Name);

    Assert.Collection(
        model.Specs,
        entry =>
        {
            Assert.Equal("camera_specs", entry.Key);
            Assert.Collection(
                entry.Value,
                spec => Assert.Equal("camera_spec1.txt", spec.Name),
                spec => Assert.Equal("camera_spec2.txt", spec.Name));
        },
        entry =>
        {
            Assert.Equal("tyre_specs", entry.Key);
            Assert.Collection(
                entry.Value,
                spec => Assert.Equal("tyre_spec1.txt", spec.Name),
                spec => Assert.Equal("tyre_spec2.txt", spec.Name));
        });

    Assert.Equal(7, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    // Each bound value must appear exactly once in model state, with matching
    // attempted and raw values.
    void AssertModelStateEntry(string key, string expectedValue)
    {
        var entry = Assert.Single(modelState, kvp => kvp.Key == key).Value;
        Assert.Equal(expectedValue, entry.AttemptedValue);
        Assert.Equal(expectedValue, entry.RawValue);
    }

    AssertModelStateEntry("p.Name", "Accord");
    AssertModelStateEntry("p.Specs[0].Key", "camera_specs");
    AssertModelStateEntry("p.Specs[0].Value[0].Name", "camera_spec1.txt");
    AssertModelStateEntry("p.Specs[0].Value[1].Name", "camera_spec2.txt");
    AssertModelStateEntry("p.Specs[1].Key", "tyre_specs");
    AssertModelStateEntry("p.Specs[1].Value[0].Name", "tyre_spec1.txt");
    AssertModelStateEntry("p.Specs[1].Value[1].Name", "tyre_spec2.txt");
}
// Same data shape as the Car2 test above, but the model (Car3) declares the
// Specs property as an IEnumerable of KeyValuePair rather than a concrete
// dictionary type; binding should still succeed.
[Fact]
public async Task MutableObjectModelBinder_BindsDictionaryProperty_WithIEnumerableOfKeyValuePair_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "p",
        ParameterType = typeof(Car3),
    };

    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString(
            "?p.Name=Accord" +
            "&p.Specs[0].Key=camera_specs" +
            "&p.Specs[0].Value[0].Name=camera_spec1.txt" +
            "&p.Specs[0].Value[1].Name=camera_spec2.txt" +
            "&p.Specs[1].Key=tyre_specs" +
            "&p.Specs[1].Value[0].Name=tyre_spec1.txt" +
            "&p.Specs[1].Value[1].Name=tyre_spec2.txt");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Car3>(modelBindingResult.Model);
    Assert.Equal("Accord", model.Name);

    Assert.Collection(
        model.Specs,
        entry =>
        {
            Assert.Equal("camera_specs", entry.Key);
            Assert.Collection(
                entry.Value,
                spec => Assert.Equal("camera_spec1.txt", spec.Name),
                spec => Assert.Equal("camera_spec2.txt", spec.Name));
        },
        entry =>
        {
            Assert.Equal("tyre_specs", entry.Key);
            Assert.Collection(
                entry.Value,
                spec => Assert.Equal("tyre_spec1.txt", spec.Name),
                spec => Assert.Equal("tyre_spec2.txt", spec.Name));
        });

    Assert.Equal(7, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    // Each bound value must appear exactly once in model state, with matching
    // attempted and raw values.
    void AssertModelStateEntry(string key, string expectedValue)
    {
        var entry = Assert.Single(modelState, kvp => kvp.Key == key).Value;
        Assert.Equal(expectedValue, entry.AttemptedValue);
        Assert.Equal(expectedValue, entry.RawValue);
    }

    AssertModelStateEntry("p.Name", "Accord");
    AssertModelStateEntry("p.Specs[0].Key", "camera_specs");
    AssertModelStateEntry("p.Specs[0].Value[0].Name", "camera_spec1.txt");
    AssertModelStateEntry("p.Specs[0].Value[1].Name", "camera_spec2.txt");
    AssertModelStateEntry("p.Specs[1].Key", "tyre_specs");
    AssertModelStateEntry("p.Specs[1].Value[0].Name", "tyre_spec1.txt");
    AssertModelStateEntry("p.Specs[1].Value[1].Name", "tyre_spec2.txt");
}
// POCO with a simple property plus a KeyValuePair<string, int> property;
// exercises binding of a key/value pair nested inside a top-level model.
private class Order8
{
// Bound from "<prefix>.Name".
public string Name { get; set; }
// Bound from "<prefix>.ProductId.Key" and "<prefix>.ProductId.Value".
public KeyValuePair<string, int> ProductId { get; set; }
}
// Binding Order8 with an explicit prefix: both the simple Name property and
// the KeyValuePair<string, int> ProductId property bind from the query string.
[Fact]
public async Task MutableObjectModelBinder_BindsKeyValuePairProperty_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order8),
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    // Fix: the query string had been corrupted by an HTML-entity mojibake
    // ("&para" rendered as '¶'), yielding "bill¶meter..."; the separators must
    // be literal "&parameter..." for the asserted model-state keys to exist.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString =
            new QueryString("?parameter.Name=bill&parameter.ProductId.Key=key0&parameter.ProductId.Value=10");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order8>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new KeyValuePair<string, int>("key0", 10), model.ProductId);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductId.Key").Value;
    Assert.Equal("key0", entry.AttemptedValue);
    Assert.Equal("key0", entry.RawValue);

    entry = Assert.Single(modelState, e => e.Key == "parameter.ProductId.Value").Value;
    Assert.Equal("10", entry.AttemptedValue);
    Assert.Equal("10", entry.RawValue);
}
// With no data under the parameter's own name, the binder falls back to the
// empty prefix and still binds both Order8 properties.
[Fact]
public async Task MutableObjectModelBinder_BindsKeyValuePairProperty_EmptyPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order8),
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Name=bill&ProductId.Key=key0&ProductId.Value=10");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order8>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(new KeyValuePair<string, int>("key0", 10), model.ProductId);

    Assert.Equal(3, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    // Each query value shows up once in model state with its raw text.
    void AssertModelStateEntry(string key, string expectedValue)
    {
        var entry = Assert.Single(modelState, kvp => kvp.Key == key).Value;
        Assert.Equal(expectedValue, entry.AttemptedValue);
        Assert.Equal(expectedValue, entry.RawValue);
    }

    AssertModelStateEntry("Name", "bill");
    AssertModelStateEntry("ProductId.Key", "key0");
    AssertModelStateEntry("ProductId.Value", "10");
}
// Only the simple property has data; the KeyValuePair property stays at its
// default value and contributes no model-state entries.
[Fact]
public async Task MutableObjectModelBinder_BindsKeyValuePairProperty_NoCollectionData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order8),
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Name=bill");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order8>(modelBindingResult.Model);
    Assert.Equal("bill", model.Name);
    Assert.Equal(default(KeyValuePair<string, int>), model.ProductId);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "parameter.Name").Value;
    Assert.Equal("bill", entry.AttemptedValue);
    Assert.Equal("bill", entry.RawValue);
}
// With no data at all, the top-level model is still created with default
// property values and model state stays empty and valid.
[Fact]
public async Task MutableObjectModelBinder_BindsKeyValuePairProperty_NoData()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order8),
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order8>(modelBindingResult.Model);
    Assert.Null(model.Name);
    Assert.Equal(default(KeyValuePair<string, int>), model.ProductId);

    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// POCO whose Specs property is a KeyValuePair with a dictionary value;
// exercises binding of nested key/value structures
// ("<prefix>.Specs.Key", "<prefix>.Specs.Value[i].Key/Value").
private class Car4
{
public string Name { get; set; }
public KeyValuePair<string, Dictionary<string, string>> Specs { get; set; }
}
// Binds a KeyValuePair property whose Value is itself a Dictionary<string, string>.
// Fix: renamed from the meaningless placeholder "Foo_..." (a plain rename to drop
// "Foo_" would collide with the Order8 test of the same name above), and added a
// previously-missing assertion on model.Specs.Key, which the query binds but the
// test never verified.
[Fact]
public async Task MutableObjectModelBinder_BindsKeyValuePairWithDictionaryValueProperty_WithPrefix_Success()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "p",
        ParameterType = typeof(Car4),
    };

    // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        var queryString = "?p.Name=Accord"
            + "&p.Specs.Key=camera_specs"
            + "&p.Specs.Value[0].Key=spec1"
            + "&p.Specs.Value[0].Value=spec1.txt"
            + "&p.Specs.Value[1].Key=spec2"
            + "&p.Specs.Value[1].Value=spec2.txt";
        request.QueryString = new QueryString(queryString);
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Car4>(modelBindingResult.Model);
    Assert.Equal("Accord", model.Name);
    // Previously unasserted: the pair's key must also have been bound.
    Assert.Equal("camera_specs", model.Specs.Key);
    Assert.Collection(
        model.Specs.Value,
        e =>
        {
            Assert.Equal("spec1", e.Key);
            Assert.Equal("spec1.txt", e.Value);
        },
        e =>
        {
            Assert.Equal("spec2", e.Key);
            Assert.Equal("spec2.txt", e.Value);
        });

    Assert.Equal(6, modelState.Count);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    // Each bound value appears once in model state with matching raw text.
    void AssertModelStateEntry(string key, string expectedValue)
    {
        var entry = Assert.Single(modelState, kvp => kvp.Key == key).Value;
        Assert.Equal(expectedValue, entry.AttemptedValue);
        Assert.Equal(expectedValue, entry.RawValue);
    }

    AssertModelStateEntry("p.Name", "Accord");
    AssertModelStateEntry("p.Specs.Key", "camera_specs");
    AssertModelStateEntry("p.Specs.Value[0].Key", "spec1");
    AssertModelStateEntry("p.Specs.Value[0].Value", "spec1.txt");
    AssertModelStateEntry("p.Specs.Value[1].Key", "spec2");
    AssertModelStateEntry("p.Specs.Value[1].Value", "spec2.txt");
}
// Top-level model whose only property is a nested POCO (Person9) with
// exclusively greedy-bound (body-bound) properties.
private class Order9
{
public Person9 Customer { get; set; }
}
// Nested model whose single property binds from the request body rather than
// from value providers (a "greedy" binding source).
private class Person9
{
[FromBody]
public Address1 Address { get; set; }
}
// If a nested POCO object has all properties bound from a greedy source, then it should be populated
// if the top-level object is created.
[Fact]
public async Task MutableObjectModelBinder_BindsNestedPOCO_WithAllGreedyBoundProperties()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order9),
    };

    // No query data; Person9.Address is [FromBody], so the nested POCO should
    // still be populated from the JSON body once the top-level object exists.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
        SetJsonBodyContent(request, AddressBodyContent);
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order9>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.NotNull(model.Customer.Address);
    Assert.Equal(AddressStreetContent, model.Customer.Address.Street);

    // No value-provider entries were produced.
    Assert.Empty(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);
}
// Model with a [BindRequired] complex property; used to verify the error
// reported when no data is available for Customer.
private class Order10
{
[BindRequired]
public Person10 Customer { get; set; }
}
// Simple nested model used as the target of Order10's required property.
private class Person10
{
public string Name { get; set; }
}
// A [BindRequired] complex property with no incoming data produces a model
// error for the property and leaves it null.
[Fact]
public async Task MutableObjectModelBinder_WithRequiredComplexProperty_NoData_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order10),
    };

    // No data in the request at all.
    var testContext = ModelBindingTestHelper.GetTestContext();
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order10>(modelBindingResult.Model);
    Assert.Null(model.Customer);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "Customer").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["Customer"].Errors);
    Assert.Equal("A value for the 'Customer' property was not provided.", error.ErrorMessage);
}
// When the MissingBindRequiredValueAccessor is customized via metadata, the
// custom message (not the default) is reported for the missing property.
[Fact]
public async Task MutableObjectModelBinder_WithBindRequired_NoData_AndCustomizedMessage_AddsGivenMessage()
{
    // Arrange
    var metadataProvider = new TestModelMetadataProvider();
    metadataProvider
        .ForProperty(typeof(Order10), nameof(Order10.Customer))
        .BindingDetails((Action<ModelBinding.Metadata.BindingMetadata>)(binding =>
        {
            // A real details provider could customize message based on BindingMetadataProviderContext.
            binding.ModelBindingMessageProvider.SetMissingBindRequiredValueAccessor(
                name => $"Hurts when '{ name }' is not provided.");
        }));
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder(metadataProvider);
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order10),
    };

    // No data in the request at all.
    var testContext = ModelBindingTestHelper.GetTestContext();
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order10>(modelBindingResult.Model);
    Assert.Null(model.Customer);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "Customer").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    // The customized message should replace the stock "A value ... was not provided."
    var error = Assert.Single(modelState["Customer"].Errors);
    Assert.Equal("Hurts when 'Customer' is not provided.", error.ErrorMessage);
}
// Model with an optional nested POCO whose own Name property is [BindRequired].
private class Order11
{
public Person11 Customer { get; set; }
}
// Nested model: Id is optional, Name must be bound whenever this object is
// created (used to verify partial-data error reporting).
private class Person11
{
public int Id { get; set; }
[BindRequired]
public string Name { get; set; }
}
// Partial data: Customer.Id binds, so Person11 is created — which makes the
// missing [BindRequired] Customer.Name an error.
[Fact]
public async Task MutableObjectModelBinder_WithNestedRequiredProperty_WithPartialData_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order11),
    };

    // Data for Customer.Id only; Customer.Name is absent.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.Customer.Id=123");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order11>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal(123, model.Customer.Id);
    Assert.Null(model.Customer.Name);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "parameter.Customer.Id").Value;
    Assert.Equal("123", entry.RawValue);
    Assert.Equal("123", entry.AttemptedValue);

    entry = Assert.Single(modelState, kvp => kvp.Key == "parameter.Customer.Name").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["parameter.Customer.Name"].Errors);
    Assert.Equal("A value for the 'Name' property was not provided.", error.ErrorMessage);
}
// Same scenario as above but the data uses the empty (fallback) prefix.
[Fact]
public async Task MutableObjectModelBinder_WithNestedRequiredProperty_WithData_EmptyPrefix_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order11),
    };

    // Data for Customer.Id only (empty prefix); Customer.Name is absent.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?Customer.Id=123");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order11>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal(123, model.Customer.Id);
    Assert.Null(model.Customer.Name);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "Customer.Id").Value;
    Assert.Equal("123", entry.RawValue);
    Assert.Equal("123", entry.AttemptedValue);

    entry = Assert.Single(modelState, kvp => kvp.Key == "Customer.Name").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["Customer.Name"].Errors);
    Assert.Equal("A value for the 'Name' property was not provided.", error.ErrorMessage);
}
// Same scenario but the parameter carries a custom binder model name, so all
// model-state keys use the "customParameter" prefix.
[Fact]
public async Task MutableObjectModelBinder_WithNestedRequiredProperty_WithData_CustomPrefix_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order11),
        BindingInfo = new BindingInfo
        {
            BinderModelName = "customParameter",
        },
    };

    // Data for Customer.Id only (custom prefix); Customer.Name is absent.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?customParameter.Customer.Id=123");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order11>(modelBindingResult.Model);
    Assert.NotNull(model.Customer);
    Assert.Equal(123, model.Customer.Id);
    Assert.Null(model.Customer.Name);

    Assert.Equal(2, modelState.Count);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "customParameter.Customer.Id").Value;
    Assert.Equal("123", entry.RawValue);
    Assert.Equal("123", entry.AttemptedValue);

    entry = Assert.Single(modelState, kvp => kvp.Key == "customParameter.Customer.Name").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["customParameter.Customer.Name"].Errors);
    Assert.Equal("A value for the 'Name' property was not provided.", error.ErrorMessage);
}
// Model with a single [BindRequired] simple property.
private class Order12
{
[BindRequired]
public string ProductName { get; set; }
}
// A [BindRequired] simple property with no data yields a model error under the
// empty prefix.
[Fact]
public async Task MutableObjectModelBinder_WithRequiredProperty_NoData_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order12),
    };

    // No data at all.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order12>(modelBindingResult.Model);
    Assert.Null(model.ProductName);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "ProductName").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["ProductName"].Errors);
    Assert.Equal("A value for the 'ProductName' property was not provided.", error.ErrorMessage);
}
// Same as the test above but with a custom binder model name prefix.
// Fix: corrected the "GetsErros" typo in the method name to "GetsErrors".
[Fact]
public async Task MutableObjectModelBinder_WithRequiredProperty_NoData_CustomPrefix_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order12),
        BindingInfo = new BindingInfo
        {
            BinderModelName = "customParameter",
        },
    };

    // No data at all.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order12>(modelBindingResult.Model);
    Assert.Null(model.ProductName);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "customParameter.ProductName").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["customParameter.ProductName"].Errors);
    Assert.Equal("A value for the 'ProductName' property was not provided.", error.ErrorMessage);
}
// When data for the [BindRequired] property IS present, binding succeeds and
// no error is reported. (The old "// No Data" comment here was wrong.)
[Fact]
public async Task MutableObjectModelBinder_WithRequiredProperty_WithData_EmptyPrefix_GetsBound()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order12),
    };

    // Data for the required property, under the empty prefix.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?ProductName=abc");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order12>(modelBindingResult.Model);
    Assert.Equal("abc", model.ProductName);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "ProductName").Value;
    Assert.Equal("abc", entry.RawValue);
    Assert.Equal("abc", entry.AttemptedValue);
}
// Model with a [BindRequired] collection property.
private class Order13
{
[BindRequired]
public List<int> OrderIds { get; set; }
}
// A [BindRequired] collection property with no data yields a model error.
// Fix: corrected the "GetsErros" typo in the method name to "GetsErrors".
[Fact]
public async Task MutableObjectModelBinder_WithRequiredCollectionProperty_NoData_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order13),
    };

    // No data at all.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order13>(modelBindingResult.Model);
    Assert.Null(model.OrderIds);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "OrderIds").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["OrderIds"].Errors);
    Assert.Equal("A value for the 'OrderIds' property was not provided.", error.ErrorMessage);
}
// Same as the test above but with a custom binder model name prefix.
// Fix: corrected the "GetsErros" typo in the method name to "GetsErrors".
[Fact]
public async Task MutableObjectModelBinder_WithRequiredCollectionProperty_NoData_CustomPrefix_GetsErrors()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order13),
        BindingInfo = new BindingInfo
        {
            BinderModelName = "customParameter",
        },
    };

    // No data at all.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order13>(modelBindingResult.Model);
    Assert.Null(model.OrderIds);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, e => e.Key == "customParameter.OrderIds").Value;
    Assert.Null(entry.RawValue);
    Assert.Null(entry.AttemptedValue);

    var error = Assert.Single(modelState["customParameter.OrderIds"].Errors);
    Assert.Equal("A value for the 'OrderIds' property was not provided.", error.ErrorMessage);
}
// When collection data IS present, the required collection binds and no error
// is reported. (The old "// No Data" comment here was wrong.)
[Fact]
public async Task MutableObjectModelBinder_WithRequiredCollectionProperty_WithData_EmptyPrefix_GetsBound()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order13),
    };

    // One element for the required collection, under the empty prefix.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?OrderIds[0]=123");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order13>(modelBindingResult.Model);
    var id = Assert.Single(model.OrderIds);
    Assert.Equal(123, id);

    Assert.Single(modelState);
    Assert.Equal(0, modelState.ErrorCount);
    Assert.True(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "OrderIds[0]").Value;
    Assert.Equal("123", entry.RawValue);
    Assert.Equal("123", entry.AttemptedValue);
}
// Model with a single value-type property, used to test type-conversion errors.
private class Order14
{
public int ProductId { get; set; }
}
// This covers the case where a key is present, but has an empty value. The type converter
// will report an error.
[Fact]
public async Task MutableObjectModelBinder_BindsPOCO_TypeConvertedPropertyNonConvertableValue_GetsError()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order14),
    };

    // The key is present but carries an empty value; the int type converter
    // reports an error for the non-convertible value.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.ProductId=");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order14>(modelBindingResult.Model);
    Assert.NotNull(model);
    Assert.Equal(0, model.ProductId);

    Assert.Single(modelState);
    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);

    var entry = Assert.Single(modelState, kvp => kvp.Key == "parameter.ProductId").Value;
    Assert.Equal(string.Empty, entry.AttemptedValue);
    Assert.Equal(string.Empty, entry.RawValue);

    var error = Assert.Single(entry.Errors);
    Assert.Equal("The value '' is invalid.", error.ErrorMessage);
    Assert.Null(error.Exception);
}
// This covers the case where a key is present, but has no value. The model binder will
// report and error because it's a value type (non-nullable).
[Fact]
[ReplaceCulture]
public async Task MutableObjectModelBinder_BindsPOCO_TypeConvertedPropertyWithEmptyValue_Error()
{
    // Arrange
    var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
    var parameter = new ParameterDescriptor
    {
        Name = "parameter",
        ParameterType = typeof(Order14),
    };

    // The key is present with no value at all; ProductId is a non-nullable
    // value type, so the binder reports an error.
    var testContext = ModelBindingTestHelper.GetTestContext(request =>
    {
        request.QueryString = new QueryString("?parameter.ProductId");
    });
    var modelState = testContext.ModelState;
    var valueProvider = await CompositeValueProvider.CreateAsync(testContext);

    // Act
    var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);

    // Assert
    Assert.True(modelBindingResult.IsModelSet);
    var model = Assert.IsType<Order14>(modelBindingResult.Model);
    Assert.NotNull(model);
    Assert.Equal(0, model.ProductId);

    var entry = Assert.Single(modelState);
    Assert.Equal("parameter.ProductId", entry.Key);
    Assert.Equal(string.Empty, entry.Value.AttemptedValue);

    var error = Assert.Single(entry.Value.Errors);
    Assert.Equal("The value '' is invalid.", error.ErrorMessage, StringComparer.Ordinal);
    Assert.Null(error.Exception);

    Assert.Equal(1, modelState.ErrorCount);
    Assert.False(modelState.IsValid);
}
// Top-level model whose property type (Address12) carries its own
// [ModelBinder(Name = ...)] attribute.
private class Person12
{
public Address1 Address { get; set; }
}
// The [ModelBinder] name on the class overrides the property-derived prefix,
// so data binds from "HomeAddress.*" keys.
[ModelBinder(Name = "HomeAddress")]
private class Address12
{
public string Street { get; set; }
}
// Make sure the metadata is honored when a [ModelBinder] attribute is associated with a class somewhere in the
// type hierarchy of an action parameter. This should behave identically to such an attribute on a property in
// the type hierarchy.
        [Theory]
        [MemberData(
            nameof(BinderTypeBasedModelBinderIntegrationTest.NullAndEmptyBindingInfo),
            MemberType = typeof(BinderTypeBasedModelBinderIntegrationTest))]
        public async Task ModelNameOnPropertyType_WithData_Succeeds(BindingInfo bindingInfo)
        {
            // Arrange
            var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
            var parameter = new ParameterDescriptor
            {
                Name = "parameter-name",
                BindingInfo = bindingInfo,
                ParameterType = typeof(Person12),
            };
            // Query uses the [ModelBinder(Name = "HomeAddress")] prefix, not "parameter-name".
            var testContext = ModelBindingTestHelper.GetTestContext(
                request => request.QueryString = new QueryString("?HomeAddress.Street=someStreet"));
            var modelState = testContext.ModelState;
            var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
            // Act
            var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
            // Assert
            Assert.True(modelBindingResult.IsModelSet);
            var person = Assert.IsType<Person12>(modelBindingResult.Model);
            Assert.NotNull(person.Address);
            Assert.Equal("someStreet", person.Address.Street, StringComparer.Ordinal);
            Assert.True(modelState.IsValid);
            // ModelState is keyed by the attribute-provided name as well.
            var kvp = Assert.Single(modelState);
            Assert.Equal("HomeAddress.Street", kvp.Key);
            var entry = kvp.Value;
            Assert.NotNull(entry);
            Assert.Empty(entry.Errors);
            Assert.Equal(ModelValidationState.Valid, entry.ValidationState);
        }
// Make sure the metadata is honored when a [ModelBinder] attribute is associated with an action parameter's
// type. This should behave identically to such an attribute on an action parameter.
        [Theory]
        [MemberData(
            nameof(BinderTypeBasedModelBinderIntegrationTest.NullAndEmptyBindingInfo),
            MemberType = typeof(BinderTypeBasedModelBinderIntegrationTest))]
        public async Task ModelNameOnParameterType_WithData_Succeeds(BindingInfo bindingInfo)
        {
            // Arrange
            var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
            var parameter = new ParameterDescriptor
            {
                Name = "parameter-name",
                BindingInfo = bindingInfo,
                ParameterType = typeof(Address12),
            };
            // The parameter's own type carries [ModelBinder(Name = "HomeAddress")],
            // so lookup uses that prefix rather than "parameter-name".
            var testContext = ModelBindingTestHelper.GetTestContext(
                request => request.QueryString = new QueryString("?HomeAddress.Street=someStreet"));
            var modelState = testContext.ModelState;
            var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
            // Act
            var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
            // Assert
            Assert.True(modelBindingResult.IsModelSet);
            var address = Assert.IsType<Address12>(modelBindingResult.Model);
            Assert.Equal("someStreet", address.Street, StringComparer.Ordinal);
            Assert.True(modelState.IsValid);
            var kvp = Assert.Single(modelState);
            Assert.Equal("HomeAddress.Street", kvp.Key);
            var entry = kvp.Value;
            Assert.NotNull(entry);
            Assert.Empty(entry.Errors);
            Assert.Equal(ModelValidationState.Valid, entry.ValidationState);
        }
        // Wrapper type: its Address property's type carries the [Bind] property filter
        // exercised by the BindAttributeOnPropertyType_* test.
        private class Person13
        {
            public Address13 Address { get; set; }
        }
        // [Bind("Street")] whitelists Street only; Number/City/State must be ignored
        // by the binder even when values for them are present in the request.
        [Bind("Street")]
        private class Address13
        {
            public int Number { get; set; }
            public string Street { get; set; }
            public string City { get; set; }
            public string State { get; set; }
        }
// Make sure the metadata is honored when a [Bind] attribute is associated with a class somewhere in the type
// hierarchy of an action parameter. This should behave identically to such an attribute on a property in the
// type hierarchy. (Test is similar to ModelNameOnPropertyType_WithData_Succeeds() but covers implementing
// IPropertyFilterProvider, not IModelNameProvider.)
        [Theory]
        [MemberData(
            nameof(BinderTypeBasedModelBinderIntegrationTest.NullAndEmptyBindingInfo),
            MemberType = typeof(BinderTypeBasedModelBinderIntegrationTest))]
        public async Task BindAttributeOnPropertyType_WithData_Succeeds(BindingInfo bindingInfo)
        {
            // Arrange
            var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
            var parameter = new ParameterDescriptor
            {
                Name = "parameter-name",
                BindingInfo = bindingInfo,
                ParameterType = typeof(Person13),
            };
            // All four Address13 properties are supplied; only Street is whitelisted by [Bind].
            var testContext = ModelBindingTestHelper.GetTestContext(
                request => request.QueryString = new QueryString(
                    "?Address.Number=23&Address.Street=someStreet&Address.City=Redmond&Address.State=WA"));
            var modelState = testContext.ModelState;
            var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
            // Act
            var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
            // Assert
            Assert.True(modelBindingResult.IsModelSet);
            var person = Assert.IsType<Person13>(modelBindingResult.Model);
            Assert.NotNull(person.Address);
            // Filtered-out properties stay at their defaults.
            Assert.Null(person.Address.City);
            Assert.Equal(0, person.Address.Number);
            Assert.Null(person.Address.State);
            Assert.Equal("someStreet", person.Address.Street, StringComparer.Ordinal);
            Assert.True(modelState.IsValid);
            // Only the bound key appears in ModelState; filtered keys leave no trace.
            var kvp = Assert.Single(modelState);
            Assert.Equal("Address.Street", kvp.Key);
            var entry = kvp.Value;
            Assert.NotNull(entry);
            Assert.Empty(entry.Errors);
            Assert.Equal(ModelValidationState.Valid, entry.ValidationState);
        }
// Make sure the metadata is honored when a [Bind] attribute is associated with an action parameter's type.
// This should behave identically to such an attribute on an action parameter. (Test is similar
// to ModelNameOnParameterType_WithData_Succeeds() but covers implementing IPropertyFilterProvider, not
// IModelNameProvider.)
        [Theory]
        [MemberData(
            nameof(BinderTypeBasedModelBinderIntegrationTest.NullAndEmptyBindingInfo),
            MemberType = typeof(BinderTypeBasedModelBinderIntegrationTest))]
        public async Task BindAttributeOnParameterType_WithData_Succeeds(BindingInfo bindingInfo)
        {
            // Arrange
            var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
            var parameter = new ParameterDescriptor
            {
                Name = "parameter-name",
                BindingInfo = bindingInfo,
                ParameterType = typeof(Address13),
            };
            // Top-level binding of Address13 itself: [Bind("Street")] still filters properties.
            var testContext = ModelBindingTestHelper.GetTestContext(
                request => request.QueryString = new QueryString("?Number=23&Street=someStreet&City=Redmond&State=WA"));
            var modelState = testContext.ModelState;
            var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
            // Act
            var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
            // Assert
            Assert.True(modelBindingResult.IsModelSet);
            var address = Assert.IsType<Address13>(modelBindingResult.Model);
            // Filtered-out properties stay at their defaults.
            Assert.Null(address.City);
            Assert.Equal(0, address.Number);
            Assert.Null(address.State);
            Assert.Equal("someStreet", address.Street, StringComparer.Ordinal);
            Assert.True(modelState.IsValid);
            var kvp = Assert.Single(modelState);
            Assert.Equal("Street", kvp.Key);
            var entry = kvp.Value;
            Assert.NotNull(entry);
            Assert.Empty(entry.Errors);
            Assert.Equal(ModelValidationState.Valid, entry.ValidationState);
        }
        // Name and Aliases are get-only (no setter), so the binder must skip them
        // and bind only the settable ProductId.
        private class Product
        {
            public int ProductId { get; set; }
            public string Name { get; }
            public IList<string> Aliases { get; }
        }
[Theory]
[InlineData("?parameter.ProductId=10")]
[InlineData("?parameter.ProductId=10¶meter.Name=Camera")]
[InlineData("?parameter.ProductId=10¶meter.Name=Camera¶meter.Aliases[0]=Camera1")]
public async Task ComplexTypeModelBinder_BindsSettableProperties(string queryString)
{
// Arrange
var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
var parameter = new ParameterDescriptor()
{
Name = "parameter",
ParameterType = typeof(Product)
};
// Need to have a key here so that the ComplexTypeModelBinder will recurse to bind elements.
var testContext = ModelBindingTestHelper.GetTestContext(request =>
{
request.QueryString = new QueryString(queryString);
SetJsonBodyContent(request, AddressBodyContent);
});
var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
// Act
var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
// Assert
Assert.True(modelBindingResult.IsModelSet);
var model = Assert.IsType<Product>(modelBindingResult.Model);
Assert.NotNull(model);
Assert.Equal(10, model.ProductId);
Assert.Null(model.Name);
Assert.Null(model.Aliases);
}
        // Model with a KeyValuePair<,> property whose Value type mixes query-bound and
        // header-bound ([FromHeader]) properties.
        private class Photo
        {
            public string Id { get; set; }
            public KeyValuePair<string, LocationInfo> Info { get; set; }
        }
        // GpsCoordinates is bound from a request header; Zipcode from the value providers.
        private class LocationInfo
        {
            [FromHeader]
            public string GpsCoordinates { get; set; }
            public int Zipcode { get; set; }
        }
        // Verifies binding of a KeyValuePair<string, LocationInfo> property where the
        // pair's Value type mixes a [FromHeader] property with ordinary query-bound data.
        [Fact]
        public async Task MutableObjectModelBinder_BindsKeyValuePairProperty_HavingFromHeaderProperty_Success()
        {
            // Arrange
            var parameterBinder = ModelBindingTestHelper.GetParameterBinder();
            var parameter = new ParameterDescriptor()
            {
                Name = "parameter",
                ParameterType = typeof(Photo)
            };
            // Need to have a key here so that the MutableObjectModelBinder will recurse to bind elements.
            var testContext = ModelBindingTestHelper.GetTestContext(request =>
            {
                request.Headers.Add("GpsCoordinates", "10,20");
                request.QueryString = new QueryString("?Id=1&Info.Key=location1&Info.Value.Zipcode=98052");
            });
            var modelState = testContext.ModelState;
            var valueProvider = await CompositeValueProvider.CreateAsync(testContext);
            // Act
            var modelBindingResult = await parameterBinder.BindModelAsync(testContext, valueProvider, parameter);
            // Assert
            Assert.True(modelBindingResult.IsModelSet);
            // Model
            var model = Assert.IsType<Photo>(modelBindingResult.Model);
            Assert.Equal("1", model.Id);
            Assert.Equal("location1", model.Info.Key);
            Assert.NotNull(model.Info.Value);
            Assert.Equal("10,20", model.Info.Value.GpsCoordinates);
            Assert.Equal(98052, model.Info.Value.Zipcode);
            // ModelState
            Assert.Equal(4, modelState.Count);
            Assert.Equal(0, modelState.ErrorCount);
            Assert.True(modelState.IsValid);
            var entry = Assert.Single(modelState, e => e.Key == "Id").Value;
            Assert.Equal("1", entry.AttemptedValue);
            Assert.Equal("1", entry.RawValue);
            entry = Assert.Single(modelState, e => e.Key == "Info.Key").Value;
            Assert.Equal("location1", entry.AttemptedValue);
            Assert.Equal("location1", entry.RawValue);
            entry = Assert.Single(modelState, e => e.Key == "Info.Value.Zipcode").Value;
            Assert.Equal("98052", entry.AttemptedValue);
            Assert.Equal("98052", entry.RawValue);
            entry = Assert.Single(modelState, e => e.Key == "Info.Value.GpsCoordinates").Value;
            Assert.Equal("10,20", entry.AttemptedValue);
            // The header value provider splits comma-separated header values into an array.
            Assert.Equal(new[] { "10", "20" }, entry.RawValue);
        }
private static void SetJsonBodyContent(HttpRequest request, string content)
{
var stream = new MemoryStream(new UTF8Encoding(encoderShouldEmitUTF8Identifier: false).GetBytes(content));
request.Body = stream;
request.ContentType = "application/json";
}
        /// <summary>
        /// Configures the request as a multipart/form-data upload containing a single
        /// form file named <paramref name="name"/> with <paramref name="content"/> as its bytes.
        /// </summary>
        private static void SetFormFileBodyContent(HttpRequest request, string content, string name)
        {
            const string fileName = "text.txt";
            var fileCollection = new FormFileCollection();
            var formCollection = new FormCollection(new Dictionary<string, StringValues>(), fileCollection);
            var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(content));
            request.Form = formCollection;
            request.ContentType = "multipart/form-data; boundary=----WebKitFormBoundarymx2fSWqWSd0OxQqq";
            request.Headers["Content-Disposition"] = $"form-data; name={name}; filename={fileName}";
            // FormCollection holds fileCollection by reference, so adding the file after
            // request.Form has been assigned is still visible to the request.
            fileCollection.Add(new FormFile(memoryStream, 0, memoryStream.Length, name, fileName)
            {
                // The file shares the request's header dictionary (including Content-Disposition).
                Headers = request.Headers
            });
        }
}
} | 40.493592 | 126 | 0.590516 | [
"Apache-2.0"
] | aneequrrehman/Mvc | test/Microsoft.AspNetCore.Mvc.IntegrationTests/ComplexTypeModelBinderIntegrationTest.cs | 110,588 | C# |
using AutoMapper;
using DotNetCqrsDemo.Domain.Commands;
using DotNetCqrsDemo.Domain.Events;
using DotNetCqrsDemo.Domain.ReadModel;
using DotNetCqrsDemo.Web.Commands.Requests.Locations;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace DotNetCqrsDemo.Web.Commands.AutoMapperConfig
{
    /// <summary>
    /// AutoMapper profile for location mappings: request -> command and event -> read model.
    /// </summary>
    public class LocationProfile : Profile
    {
        public LocationProfile()
        {
            // NOTE: Guid.NewGuid() runs inside ConstructUsing, so each mapping operation
            // produces a fresh aggregate id for the command.
            CreateMap<CreateLocationRequest, CreateLocationCommand>()
                .ConstructUsing(x => new CreateLocationCommand(Guid.NewGuid(), x.LocationID, x.StreetAddress, x.City, x.State, x.PostalCode));
            // The event's Id becomes the read model's AggregateID.
            CreateMap<LocationCreatedEvent, LocationRM>()
                .ForMember(dest => dest.AggregateID, opt => opt.MapFrom(src => src.Id));
        }
    }
| 32.48 | 142 | 0.714286 | [
"MIT"
] | LambertW/DotNetCqrsDemo | DotNetCqrsDemo.Web.Commands/AutoMapperConfig/LocationProfile.cs | 814 | C# |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Microsoft.Azure.Management.HDInsight.Models;
using System.Collections.Generic;
using System.Linq;
namespace Microsoft.Azure.Commands.HDInsight.Models.Management
{
    public class AzureHDInsightCapabilities
    {
        /// <summary>
        /// Wraps a raw <see cref="CapabilitiesResult"/> in PowerShell-friendly types.
        /// Each dictionary/list stays null when the corresponding source section is null,
        /// EXCEPT Quota, which is always constructed (possibly wrapping a null inner value) —
        /// NOTE(review): this asymmetry looks intentional for consumers but is worth confirming.
        /// </summary>
        public AzureHDInsightCapabilities(CapabilitiesResult capabilitiesResult)
        {
            this.Versions = capabilitiesResult?.Versions?.ToDictionary(item => item.Key, item => new AzureHDInsightVersionsCapability(item.Value));
            this.Regions = capabilitiesResult?.Regions?.ToDictionary(item => item.Key, item => new AzureHDInsightRegionsCapability(item.Value));
            this.VmSizes = capabilitiesResult?.VmSizes?.ToDictionary(item => item.Key, item => new AzureHDInsightVmSizesCapability(item.Value));
            this.VmSizeFilters = capabilitiesResult?.VmSizeFilters?.Select(val => new AzureHDInsightVmSizeCompatibilityFilter(val)).ToList();
            this.Features = capabilitiesResult?.Features;
            this.Quota = new AzureHDInsightQuotaCapability(capabilitiesResult?.Quota);
        }
        /// <summary>
        /// The available cluster component versions.
        /// </summary>
        public IDictionary<string, AzureHDInsightVersionsCapability> Versions { get; set; }
        /// <summary>
        /// The available regions.
        /// </summary>
        public IDictionary<string, AzureHDInsightRegionsCapability> Regions { get; set; }
        /// <summary>
        /// The available vm sizes.
        /// </summary>
        public IDictionary<string, AzureHDInsightVmSizesCapability> VmSizes { get; set; }
        /// <summary>
        /// The vmsize filters.
        /// </summary>
        public IList<AzureHDInsightVmSizeCompatibilityFilter> VmSizeFilters { get; set; }
        /// <summary>
        /// The supported features.
        /// </summary>
        public IList<string> Features { get; set; }
        /// <summary>
        /// The quota capability.
        /// </summary>
        public AzureHDInsightQuotaCapability Quota { get; set; }
    }
}
| 41.536232 | 148 | 0.623168 | [
"MIT"
] | 3quanfeng/azure-powershell | src/HDInsight/HDInsight/Models/Management/AzureHDInsightCapabilities.cs | 2,800 | C# |
using NSubstitute;
using Received = NSubstitute.Received;
namespace TestStack.Seleno.Tests.Configuration.SelenoApplication
{
    // BDD-style specification: Given_/When_/Then_ methods are presumably discovered and
    // run in convention order by the SelenoApplicationSpecification base — TODO confirm.
    class When_disposing_initialised_seleno_application : SelenoApplicationSpecification
    {
        public void Given_initialised_seleno_application()
        {
            SUT.Initialize();
        }
        public void When_disposing_application()
        {
            SUT.Dispose();
        }
        public void Then_dispose_browser_and_webserver_followed_by_container()
        {
            // NSubstitute Received.InOrder asserts both the calls and their relative order:
            // browser closed, web server stopped, then container disposed last.
            Received.InOrder(() => {
                WebDriver.Received().Close();
                WebServer.Received().Stop();
                ContainerDisposal.Received().Dispose();
            });
        }
    }
}
| 27.25 | 89 | 0.587156 | [
"MIT"
] | Sorrell-Solutions/TestStack.Seleno | src/TestStack.Seleno.Tests/Configuration/SelenoApplication/When_disposing_initialised_seleno_application.cs | 738 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
namespace Hanssens.Net.Extensions
{
public partial class Reflection
{
/// <summary>
/// Determines if the *Type* of an object instance is nullable,
/// regardless if it is a value type or reference type.
/// </summary>
public static bool IsNullableType<T>(T obj)
{
// the obvious and simplest, perhaps even cheapest, comparison
// is simply inspecting the value
if (obj == null) return true;
var type = typeof(T);
if (type.IsGenericParameter)
{
// determine for a generic type Nullable<T>
return (type.IsGenericParameter && type.GetGenericTypeDefinition() == typeof(Nullable<>));
}
else
{
// determine for a non-generic type
return (Nullable.GetUnderlyingType(type) != null);
}
}
}
}
| 29.527778 | 106 | 0.566322 | [
"MIT"
] | hanssens/extensions | src/Hanssens.Net/Extensions/Reflection/IsNullableType.cs | 1,065 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Xamarin.QuickUI;
namespace QuickUIDemo
{
	// Application entry point for the QuickUI demo: wraps the welcome page in a
	// NavigationPage so later pages can be pushed onto the navigation stack.
	public static class DemoApp
	{
		public static Page GetMainPage ()
		{
			var result = new NavigationPage (new WelcomePage ());
			return result;
		}
	}
	// Displays a hard-coded person list using a two-line TextCell template
	// (Name bound to the main text, Title to the detail text).
	public class DataPage : ContentPage
	{
		public DataPage ()
		{
			Title = "Person List";
			// Anonymous-type items; bindings below reference these property names.
			var itemList = new[] {
				new { Name = "Jason Smith", Title = "Engineer" },
				new { Name = "Seth Rosetter", Title = "Jr. Engineer" },
				new { Name = "Eric M", Title = "Engineer" },
				new { Name = "David S", Title = "Designer" },
				new { Name = "Vinny D", Title = "Designer" },
				new { Name = "Nat Friedman", Title = "CEO" },
				new { Name = "Miguel de Icaza", Title = "CTO" },
			};
			var listView = new ListView {
				ItemSource = itemList,
				Template = new CellTemplate (typeof (TextCell))
			};
			listView.Template.SetBinding (TextCell.TextProperty, new Binding ("Name"));
			listView.Template.SetBinding (TextCell.DetailProperty, new Binding ("Title"));
			Content = listView;
		}
	}
	// Simple login form: the Login button stays disabled until both the username
	// and password fields are non-blank, then pushes the DataPage.
	public class WelcomePage : ContentPage
	{
		StackLayout mainLayout;
		Entry usernameEntry;
		Entry passwordEntry;
		Button loginButton;
		public WelcomePage ()
		{
			Title = "QuickUI Demo";
			Content = mainLayout = new StackLayout {
				Padding = new Thickness(20, 20, 20, 100),
				Spacing = 10,
				Orientation = StackLayout.StackOrientation.Vertical,
				VerticalOptions = LayoutOptions.Center
			};
			mainLayout.Add (new Label {
				Text = "Welcome to the QuickUI Demo App!",
				XAlign = TextAlignment.Center
			});
			mainLayout.Add (usernameEntry = new Entry { Placeholder = "Username" });
			mainLayout.Add (passwordEntry = new Entry {
				Placeholder = "Password",
				Password = true
			});
			mainLayout.Add (loginButton = new Button {
				Text = "Login",
				IsEnabled = false
			});
			// Re-evaluate button availability on every keystroke in either field.
			usernameEntry.ValueChanged += (sender, arg) => loginButton.IsEnabled = LoginAvailable ();
			passwordEntry.ValueChanged += (sender, arg) => loginButton.IsEnabled = LoginAvailable ();
			loginButton.Activated += (sender, args) => {
				// Guard again in case Activated fires while the fields are blank.
				if (LoginAvailable ()) {
					Navigation.Push (new DataPage ());
				}
			};
		}
		// True when both entries contain non-whitespace text.
		bool LoginAvailable ()
		{
			return !string.IsNullOrWhiteSpace (usernameEntry.Text) && !string.IsNullOrWhiteSpace (passwordEntry.Text);
		}
	}
}
| 25.430108 | 109 | 0.6537 | [
"MIT"
] | Samples-Playgrounds/Samples.Xamarin.Forms | diverse/forum-samples/02-NavigationWithContentPages/iOS/DemoApp.cs | 2,367 | C# |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// <auto-generated/>
// Template Source: Templates\CSharp\Requests\IMethodRequestBuilder.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
/// <summary>
/// The interface IAndroidManagedStoreAccountEnterpriseSettingsCompleteSignupRequestBuilder.
/// </summary>
    /// <summary>
    /// The interface IAndroidManagedStoreAccountEnterpriseSettingsCompleteSignupRequestBuilder.
    /// (Tool-generated — edits to this file are overwritten on regeneration.)
    /// </summary>
    public partial interface IAndroidManagedStoreAccountEnterpriseSettingsCompleteSignupRequestBuilder
    {
        /// <summary>
        /// Builds the request.
        /// </summary>
        /// <param name="options">The query and header options for the request.</param>
        /// <returns>The built request.</returns>
        IAndroidManagedStoreAccountEnterpriseSettingsCompleteSignupRequest Request(IEnumerable<Option> options = null);
    }
}
| 41.068966 | 153 | 0.624685 | [
"MIT"
] | OfficeGlobal/msgraph-beta-sdk-dotnet | src/Microsoft.Graph/Requests/Generated/IAndroidManagedStoreAccountEnterpriseSettingsCompleteSignupRequestBuilder.cs | 1,191 | C# |
using System;
using CryptoExchange.Net.Converters;
using Kraken.Net.Converters;
using Newtonsoft.Json;
namespace Kraken.Net.Objects
{
/// <summary>
/// Ledger entry info
/// </summary>
public class KrakenLedgerEntry
{
/// <summary>
/// Reference id
/// </summary>
[JsonProperty("refid")]
public string ReferenceId { get; set; } = string.Empty;
/// <summary>
/// Timestamp
/// </summary>
[JsonProperty("time"), JsonConverter(typeof(TimestampSecondsConverter))]
public DateTime Timestamp { get; set; }
/// <summary>
/// The type of entry
/// </summary>
[JsonConverter(typeof(LedgerEntryTypeConverter))]
public LedgerEntryType Type { get; set; }
/// <summary>
/// Class of the asset
/// </summary>
[JsonProperty("aclass")]
public string AssetClass { get; set; } = string.Empty;
/// <summary>
/// Name of the asset
/// </summary>
public string Asset { get; set; } = string.Empty;
/// <summary>
/// The quantity of the entry
/// </summary>
[JsonProperty("amount")]
public decimal Quantity { get; set; }
/// <summary>
/// Fee paid
/// </summary>
public decimal Fee { get; set; }
/// <summary>
/// Resulting balance
/// </summary>
[JsonProperty("balance")]
public decimal BalanceAfter { get; set; }
}
}
| 27.781818 | 80 | 0.533377 | [
"MIT"
] | JKorf/Kraken.Net | Kraken.Net/Objects/KrakenLedgerEntry.cs | 1,530 | C# |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Template Source: Templates\CSharp\Requests\IEntityCollectionPage.cs.tt
namespace Microsoft.Graph
{
using System;
using Newtonsoft.Json;
    /// <summary>
    /// The interface IManagedAppRegistrationIntendedPoliciesCollectionPage.
    /// (Tool-generated — edits to this file are overwritten on regeneration.)
    /// </summary>
    [JsonConverter(typeof(InterfaceConverter<ManagedAppRegistrationIntendedPoliciesCollectionPage>))]
    public interface IManagedAppRegistrationIntendedPoliciesCollectionPage : ICollectionPage<ManagedAppPolicy>
    {
        /// <summary>
        /// Gets the next page <see cref="IManagedAppRegistrationIntendedPoliciesCollectionRequest"/> instance.
        /// </summary>
        IManagedAppRegistrationIntendedPoliciesCollectionRequest NextPageRequest { get; }
        /// <summary>
        /// Initializes the NextPageRequest property.
        /// </summary>
        void InitializeNextPageRequest(IBaseClient client, string nextPageLinkString);
    }
}
| 41.5 | 153 | 0.646084 | [
"MIT"
] | AzureMentor/msgraph-sdk-dotnet | src/Microsoft.Graph/Requests/Generated/IManagedAppRegistrationIntendedPoliciesCollectionPage.cs | 1,328 | C# |
using System;
using System.Windows;
using System.Windows.Interactivity;
using ColorFinder.Models;
using ColorFinder.ViewModels;
using ColorFinder.Views;
using Microsoft.Practices.Prism.Interactivity.InteractionRequest;
namespace ColorFinder.TriggerActions
{
    /// <summary>
    /// スポイトウィンドウを表示します。
    /// (Shows the color-dropper window as a modal dialog and reports the picked
    /// color back through the Prism interaction request.)
    /// </summary>
    public class ShowDropperDialogAction : TriggerAction<Window>
    {
        protected override void Invoke(object parameter)
        {
            // Assumes the trigger parameter is a Prism InteractionRequestedEventArgs
            // carrying a Confirmation context — the casts throw otherwise.
            var arg = (InteractionRequestedEventArgs)parameter;
            var confirmation = (Confirmation)arg.Context;
            var dropperWindow = new DropperWindow();
            var dropperWindowViewModel = (DropperWindowViewModel)dropperWindow.DataContext;
            // Hide the owner while the modal dropper is open, then restore focus.
            AssociatedObject.Hide();
            dropperWindow.ShowDialog();
            AssociatedObject.Show();
            AssociatedObject.Activate();
            // Hand the picked RGB values back to the requester via the Confirmation.
            confirmation.Confirmed = dropperWindowViewModel.Confirmed;
            confirmation.Content = new ColorCode { R = dropperWindowViewModel.R.Value, G = dropperWindowViewModel.G.Value, B = dropperWindowViewModel.B.Value };
            arg.Callback();
        }
    }
} | 35.181818 | 160 | 0.687339 | [
"MIT"
] | aridai/ColorFinder | TriggerActions/ShowDropperDialogAction.cs | 1,195 | C# |
namespace YifySharp
{
    /// <summary>
    /// Generic envelope for API responses: status fields plus a typed payload.
    /// </summary>
    public class YifyResponse<T>
    {
        /// <summary>
        /// The status of the API call, will be either 'ok' or 'error'
        /// </summary>
        public string Status { get; set; }
        /// <summary>
        /// The status message of the API call
        /// </summary>
        public string StatusMessage { get; set; }
        /// <summary>
        /// The results returned from the query
        /// </summary>
        public T Data { get; set; }
    }
} | 25.05 | 70 | 0.510978 | [
"MIT"
] | JohnTheGr8/YifySharp | src/YifySharp/Response.cs | 503 | C# |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("AnalysisTests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("AnalysisTests")]
[assembly: AssemblyCopyright("Copyright © Autodesk, Inc 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("9863bd2e-1907-4e1c-8aad-80928af43005")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 38.162162 | 84 | 0.746459 | [
"Apache-2.0",
"MIT"
] | frankfralick/Dynamo | test/Libraries/AnalysisTests/Properties/AssemblyInfo.cs | 1,415 | C# |
namespace Eastern.Protocol.Operations
{
    // Payload for a command operation sent over the wire; presumably mirrors the
    // server's COMMAND request layout — TODO confirm field semantics against protocol docs.
    internal class CommandPayload
    {
        // Payload kind (e.g. query vs. command variants as defined by CommandPayloadType).
        internal CommandPayloadType Type { get; set; }
        // Scripting/query language identifier.
        internal string Language { get; set; }
        // The command or query text itself.
        internal string Text { get; set; }
        // Result-set limit used when the payload carries no textual limit.
        internal int NonTextLimit { get; set; }
        // Fetch plan string controlling eager loading of linked records.
        internal string FetchPlan { get; set; }
        // Pre-serialized command parameters, sent verbatim.
        internal byte[] SerializedParams { get; set; }
    }
}
| 28.5 | 55 | 0.60401 | [
"MIT"
] | yojimbo87/Eastern | src/Eastern/Protocol/Operations/CommandPayload.cs | 401 | C# |
using System;
using System.Linq;
using System.Linq.Expressions;
using EasySql.Infrastructure;
using EasySql.Query.SqlExpressions;
namespace EasySql.Query
{
public class QueryTranslator : ExpressionVisitor, IQueryTranslator
{
private readonly QueryExpression _queryExpression = new QueryExpression();
private readonly QueryContext _queryContext;
private readonly IEntityConfiguration _entityConfiguration;
private readonly IQueryableMethodTranslator _queryableMethodTranslator;
        /// <summary>
        /// Captures the query context and the collaborators used during translation.
        /// </summary>
        public QueryTranslator(QueryContext queryContext)
        {
            _queryContext = queryContext;
            // Cached from the context for direct access in the visit methods.
            _entityConfiguration = queryContext.EntityConfiguration;
            _queryableMethodTranslator = queryContext.QueryableMethodTranslator;
        }
public SqlExpression Translate(Expression expression)
{
var result = Visit(expression);
return _queryExpression;
}
        /// <summary>
        /// Pass-through override; kept as an explicit extension point for future
        /// pre/post-visit behavior.
        /// </summary>
        public override Expression Visit(Expression node)
        {
            return base.Visit(node);
        }
        /// <summary>
        /// Handles the custom query-root node: binds the entity's table into the
        /// accumulated query. All other extension nodes fall through to the base visitor.
        /// </summary>
        protected override Expression VisitExtension(Expression node)
        {
            if (node is EntityQueryExpression entityQueryExpression)
            {
                // "EntityDefintion" [sic] is the property's actual (misspelled) name on
                // EntityQueryExpression — it is declared elsewhere and cannot be fixed here.
                var entityDefinition = entityQueryExpression.EntityDefintion;
                _queryExpression.SetTable(entityDefinition);
                return _queryExpression;
            }
            return base.VisitExtension(node);
        }
private LambdaExpression GetLambdaExpression(Expression node)
{
if (node.NodeType == ExpressionType.Quote && node is UnaryExpression unaryExpression)
{
return unaryExpression.Operand as LambdaExpression;
}
return node as LambdaExpression;
}
protected override Expression VisitMethodCall(MethodCallExpression node)
{
if (node.Method.DeclaringType == typeof(Queryable))
{
var source = Visit(node.Arguments[0]) as QueryExpression;
switch (node.Method.Name)
{
case nameof(Queryable.All):
TranslateAll(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.Any) when node.Arguments.Count == 1:
TranslateAny();
break;
case nameof(Queryable.Any) when node.Arguments.Count == 2:
TranslateAny(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.AsQueryable):
break;
case nameof(Queryable.Average):
TranslateAverage(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.Contains): break;
case nameof(Queryable.Count) when node.Arguments.Count == 1:
TranslateCount();
break;
case nameof(Queryable.Count) when node.Arguments.Count == 2:
TranslateCount(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.LongCount) when node.Arguments.Count == 1:
TranslateCount();
break;
case nameof(Queryable.LongCount) when node.Arguments.Count == 2:
TranslateCount(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.DefaultIfEmpty):
TranslateDefaultIfEmpty();
break;
case nameof(Queryable.Distinct):
TranslateDistinct(source);
break;
case nameof(Queryable.ElementAt):
throw new NotSupportedException();
case nameof(Queryable.ElementAtOrDefault):
throw new NotSupportedException();
case nameof(Queryable.First) when node.Arguments.Count == 1:
TranslateFirst();
break;
case nameof(Queryable.First) when node.Arguments.Count == 2:
TranslateFirst(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.FirstOrDefault) when node.Arguments.Count == 1:
TranslateFirst();
break;
case nameof(Queryable.FirstOrDefault) when node.Arguments.Count == 2:
TranslateFirst(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.GroupBy):
TranslateGroupBy(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.GroupJoin): break;
case nameof(Queryable.Join): break;
case nameof(Queryable.Last) when node.Arguments.Count == 1:
case nameof(Queryable.LastOrDefault) when node.Arguments.Count == 1:
TranslateLast();
break;
case nameof(Queryable.Last) when node.Arguments.Count == 2:
case nameof(Queryable.LastOrDefault) when node.Arguments.Count == 2:
TranslateLast(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.Max) when node.Arguments.Count == 1:
throw new NotSupportedException();
case nameof(Queryable.Max) when node.Arguments.Count == 2:
TranslateMax(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.Min) when node.Arguments.Count == 1:
throw new NotSupportedException();
case nameof(Queryable.Min) when node.Arguments.Count == 2:
TranslateMin(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.OrderBy):
TranslateOrderBy(GetLambdaExpression(node.Arguments[1]), false);
break;
case nameof(Queryable.OrderByDescending):
TranslateOrderBy(GetLambdaExpression(node.Arguments[1]), true);
break;
case nameof(Queryable.Select):
TranslateSelect(source, GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.SelectMany):
throw new NotSupportedException();
case nameof(Queryable.Single) when node.Arguments.Count == 1:
TranslateSingle();
break;
case nameof(Queryable.Single) when node.Arguments.Count == 2:
TranslateSingle(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.SingleOrDefault) when node.Arguments.Count == 1:
TranslateSingle();
break;
case nameof(Queryable.SingleOrDefault) when node.Arguments.Count == 2:
TranslateSingle(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.Skip):
TranslateSkip(node.Arguments[1] as ConstantExpression);
break;
#if NETSTANDARD2_1_OR_GREATER
case nameof(Queryable.SkipLast):
#endif
case nameof(Queryable.SkipWhile):
throw new NotSupportedException();
case nameof(Queryable.Sum):
TranslateSum(GetLambdaExpression(node.Arguments[1]));
break;
case nameof(Queryable.Take):
TranslateTake(node.Arguments[1] as ConstantExpression);
break;
#if NETSTANDARD2_1_OR_GREATER1
case nameof(Queryable.TakeLast):
#endif
case nameof(Queryable.TakeWhile):
throw new NotSupportedException();
case nameof(Queryable.ThenBy):
TranslateOrderBy(GetLambdaExpression(node.Arguments[1]), false);
break;
case nameof(Queryable.ThenByDescending):
TranslateOrderBy(GetLambdaExpression(node.Arguments[1]), true);
break;
case nameof(Queryable.Union): break;
case nameof(Queryable.Where):
return TranslateWhere(GetLambdaExpression(node.Arguments[1]));
}
return source;
}
else
{
var result = _queryableMethodTranslator.Translate(this, node);
if (result != node)
return result;
}
return base.VisitMethodCall(node);
}
protected override Expression VisitBinary(BinaryExpression node)
{
if (node.Left is ConstantExpression constant1 && constant1.Value == null)
{
var o = Visit(node.Right);
return new SqlUnaryExpression(o, node.NodeType, typeof(bool));
}
if (node.Right is ConstantExpression constant2 && constant2.Value == null)
{
var o = Visit(node.Left);
return new SqlUnaryExpression(o, node.NodeType, typeof(bool));
}
var n1 = Visit(node.Left);
var n2 = Visit(node.Right);
return new SqlBinaryExpression(n1, n2, node.NodeType);
}
protected override Expression VisitUnary(UnaryExpression node)
{
var nodeTypes = new[] {
ExpressionType.Not,
ExpressionType.NotEqual,
ExpressionType.Equal,
ExpressionType.Negate,
};
if (nodeTypes.Contains(node.NodeType))
{
var o = Visit(node.Operand);
return new SqlUnaryExpression(o, node.NodeType, node.Type);
}
if (node.NodeType == ExpressionType.Convert)
{
return Visit(node.Operand);
}
return node;
}
protected override Expression VisitConstant(ConstantExpression node)
{
return new SqlConstantExpression(node.Value);
}
protected override Expression VisitParameter(ParameterExpression node)
{
var entity = _entityConfiguration.Find(node.Type);
if (entity != null)
{
if (_queryExpression.Table.Entity.EntityType == entity.EntityType)
{
return _queryExpression.Table;
}
return new TableExpression(entity, node.Name);
}
return node;
}
protected override Expression VisitMember(MemberExpression node)
{
var innerExpression = Visit(node.Expression);
if (innerExpression is TableExpression table)
{
var column = table.Entity.FindColumn(node.Member);
if (column == null)
{
throw new Exception($"The column '{node.Member}' not found.");
}
return new ColumnExpression(column, null, table.Alias);
}
return node;
}
protected override Expression VisitNew(NewExpression node)
{
foreach (var item in node.Members)
{
// _queryExpression.AddProjection(new ColumnExpression(null, item.Name));
}
return node;
}
protected virtual Expression TranslateWhere(LambdaExpression node)
{
// rewrite
if (node.Body is MemberExpression memberExpression)
{
var result = new SqlBinaryExpression(Visit(memberExpression), new SqlConstantExpression(true), ExpressionType.Equal);
_queryExpression.ApplyPredicate(result);
return node;
}
else
{
var result = Visit(node.Body);
_queryExpression.ApplyPredicate(result);
return node;
}
}
protected virtual Expression TranslateAll(LambdaExpression node = null)
{
if (node != null)
{
TranslateWhere(node);
}
var exists = new ExistsExpression((SqlExpression)_queryExpression.Predicate, true);
_queryExpression.ClearProjections();
_queryExpression.AddProjection(exists);
_queryExpression.ApplyLimit(1);
_queryExpression.ChangeResultType(QueryResultType.Single);
return node;
}
protected virtual Expression TranslateAny(LambdaExpression node = null)
{
if (node != null)
{
TranslateWhere(node);
}
var exists = new ExistsExpression((SqlExpression)_queryExpression.Predicate, false);
_queryExpression.ClearProjections();
_queryExpression.AddProjection(exists);
_queryExpression.ApplyLimit(1);
_queryExpression.ChangeResultType(QueryResultType.Single);
return node;
}
protected virtual Expression TranslateAverage(LambdaExpression node)
{
return TranslateAggregate("AVG", node);
}
protected virtual Expression TranslateMax(LambdaExpression node)
{
return TranslateAggregate("MAX", node);
}
protected virtual Expression TranslateMin(LambdaExpression node)
{
return TranslateAggregate("MIN", node);
}
protected virtual Expression TranslateSum(LambdaExpression node)
{
return TranslateAggregate("SUM", node);
}
protected virtual Expression TranslateCount(LambdaExpression node = null)
{
return TranslateAggregate("COUNT", node);
}
protected virtual Expression TranslateAggregate(string name, LambdaExpression node = null)
{
_queryExpression.ClearProjections();
_queryExpression.ChangeResultType(QueryResultType.Single);
if (node != null)
{
if (node.ReturnType.IsValueType)
{
TranslateWhere(node);
_queryExpression.AddProjection(new ProjectionExpression(null, new SqlFunctionExpression(null, name, new SqlExpression[] { new SqlFragmentExpression("*") })));
}
else
{
var column = Visit(node.Body) as ColumnExpression;
if (column == null)
{
throw new Exception("Translate error.");
}
_queryExpression.AddProjection(new ProjectionExpression(null, new SqlFunctionExpression(null, name, new SqlExpression[] { column })));
}
}
else
{
_queryExpression.AddProjection(new ProjectionExpression(null, new SqlFunctionExpression(null, name, new SqlExpression[] { new SqlFragmentExpression("*") })));
}
return node;
}
//protected virtual Expression TranslateContains(Expression node)
//{
// var value = Visit(node);
// // TODO
// // _queryExpression.ApplyPredicate();
// return node;
//}
protected virtual Expression TranslateDefaultIfEmpty()
{
throw new NotSupportedException();
}
        /// <summary>
        /// Translates Queryable.Distinct by flagging the query as DISTINCT.
        /// </summary>
        protected virtual Expression TranslateDistinct(Expression source)
        {
            // TODO
            // get all columns.
            _queryExpression.SetIsDistinct(true);
            // NOTE(review): marking a Distinct query as a single-value result
            // looks suspicious -- Distinct normally yields a sequence. Confirm
            // whether QueryResultType.Single is intended here.
            _queryExpression.ChangeResultType(QueryResultType.Single);
            return source;
        }
protected virtual Expression TranslateFirst(LambdaExpression node = null)
{
if (node != null)
{
TranslateWhere(node);
}
_queryExpression.ApplyLimit(1);
_queryExpression.ChangeResultType(QueryResultType.Single);
return node;
}
protected virtual Expression TranslateLast(LambdaExpression node = null)
{
if (node != null)
{
TranslateWhere(node);
}
if (_queryExpression.Orderings.Count == 0)
throw new InvalidOperationException("Before 'Last' must have sort order.");
_queryExpression.ReverseOrdering();
_queryExpression.ApplyLimit(1);
_queryExpression.ChangeResultType(QueryResultType.Single);
return node;
}
protected virtual Expression TranslateSingle(LambdaExpression node = null)
{
if (node != null)
{
TranslateWhere(node);
}
_queryExpression.ApplyLimit(1);
_queryExpression.ChangeResultType(QueryResultType.Single);
return node;
}
protected virtual Expression TranslateSkip(ConstantExpression node)
{
_queryExpression.ApplyOffset(int.Parse(node.Value.ToString()));
return node;
}
protected virtual Expression TranslateTake(ConstantExpression node)
{
_queryExpression.ApplyLimit(int.Parse(node.Value.ToString()));
return node;
}
protected virtual Expression TranslateOrderBy(LambdaExpression node, bool isDescending = false)
{
var column = Visit(node) as SqlExpression;
_queryExpression.AddOrdering(new OrderingExpression(column, isDescending));
return node;
}
protected virtual Expression TranslateSelect(QueryExpression source, LambdaExpression node)
{
if (node.Parameters[0] == node.Body)
return source;
var result = Visit(node.Body);
if (result is ColumnExpression column)
{
_queryExpression.AddProjection(column);
}
return node;
}
protected virtual Expression TranslateGroupBy(LambdaExpression node)
{
if (node.Body.NodeType == ExpressionType.New && node.Body is NewExpression newExpression)
{
foreach (var item in newExpression.Arguments)
{
var c = Visit(item);
_queryExpression.AddGrouping(c as ColumnExpression);
}
}
else
{
var selector = Visit(node.Body);
if (selector is ColumnExpression columnExpression)
{
_queryExpression.AddGrouping(columnExpression);
}
}
return node;
}
}
}
| 35.778584 | 178 | 0.537841 | [
"MIT"
] | jxnkwlp/EasySql | src/EasySql.Core/Query/QueryTranslator.cs | 19,716 | C# |
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Security.Principal;
using CMS.Base;
using CMS.DataEngine;
using CMS.EventLog;
using CMS.Helpers;
using CMS.MacroEngine;
using CMS.Membership;
using CMS.UIControls;
/// <summary>
/// Administration page that re-signs macro expressions stored in object data:
/// it refreshes their security parameters (hash salt and signing identity)
/// for every object type that can contain macros.
/// </summary>
public partial class CMSModules_System_Macros_System_Macros : GlobalAdminPage
{
    private const string EVENTLOG_SOURCE_REFRESHSECURITYPARAMS = "Macros - Refresh security parameters";

    // Collects "nice object type" -> object display name pairs for every
    // object whose macro signatures were actually refreshed; flushed to the
    // event log when the refresh run completes.
    private readonly NameValueCollection processedObjects = new NameValueCollection();

    /// <summary>
    /// Sets up the form controls and the asynchronous log dialog on page load.
    /// </summary>
    protected override void OnLoad(EventArgs e)
    {
        base.OnLoad(e);
        InitForm();
        InitAsyncDialog();
    }

    #region "Async log"

    /// <summary>
    /// Inits the async dialog.
    /// </summary>
    private void InitAsyncDialog()
    {
        ctlAsyncLog.TitleText = GetString("macros.refreshsecurityparams.title");
        ctlAsyncLog.OnCancel += ctlAsyncLog_OnCancel;
        ctlAsyncLog.OnFinished += ctlAsyncLog_OnFinished;
    }

    /// <summary>
    /// Handles cancellation of the async action: logs it and hides the log panel.
    /// </summary>
    private void ctlAsyncLog_OnCancel(object sender, EventArgs args)
    {
        EventLogProvider.LogEvent(EventType.INFORMATION, ctlAsyncLog.Parameter, "CANCELLED");
        pnlAsyncLog.Visible = false;
        ShowConfirmation(GetString("general.actioncanceled"));
    }

    /// <summary>
    /// Handles completion of the async action: logs it and hides the log panel.
    /// </summary>
    private void ctlAsyncLog_OnFinished(object sender, EventArgs args)
    {
        EventLogProvider.LogEvent(EventType.INFORMATION, ctlAsyncLog.Parameter, "FINISHED");
        pnlAsyncLog.Visible = false;
        ShowConfirmation(GetString("general.actionfinished"));
    }

    /// <summary>
    /// Runs the specified action asynchronously.
    /// </summary>
    /// <param name="actionName">Action name</param>
    /// <param name="action">Action</param>
    private void RunAsync(string actionName, AsyncAction action)
    {
        // Set action name as process parameter
        ctlAsyncLog.Parameter = actionName;

        // Log async action start
        EventLogProvider.LogEvent(EventType.INFORMATION, actionName, "STARTED");

        // Run async action
        ctlAsyncLog.RunAsync(action, WindowsIdentity.GetCurrent());
    }

    #endregion

    #region "Refresh security params"

    /// <summary>
    /// Inits the "Refresh security parameters" form.
    /// </summary>
    private void InitForm()
    {
        // Init old salt text box
        if (chkRefreshAll.Checked)
        {
            txtOldSalt.Enabled = false;
            txtOldSalt.Text = GetString("macros.refreshsecurityparams.refreshalldescription");
        }
        else
        {
            txtOldSalt.Enabled = true;
        }
        chkRefreshAll.CheckedChanged += (sender, args) =>
        {
            // Clear the textbox after enabling it
            if (!chkRefreshAll.Checked)
            {
                txtOldSalt.Text = null;
            }
        };

        // Init new salt text box
        if (chkUseCurrentSalt.Checked)
        {
            txtNewSalt.Enabled = false;
            var customSalt = SettingsHelper.AppSettings[ValidationHelper.APP_SETTINGS_HASH_STRING_SALT];
            var resString = string.IsNullOrEmpty(customSalt) ? "macros.refreshsecurityparams.currentsaltisconnectionstring" : "macros.refreshsecurityparams.currentsaltiscustomvalue";
            txtNewSalt.Text = GetString(resString);
        }
        else
        {
            txtNewSalt.Enabled = true;
        }
        chkUseCurrentSalt.CheckedChanged += chkUseCurrentSalt_CheckedChanged;

        // Init submit button
        btnRefreshSecurityParams.Text = GetString("macros.refreshsecurityparams");
        btnRefreshSecurityParams.Click += (sender, args) =>
        {
            var oldSaltInput = txtOldSalt.Text.Trim();
            var newSaltInput = txtNewSalt.Text.Trim();

            // Validate: an old salt is required unless re-signing everything,
            // and a new salt is required unless reusing the current one.
            if (!chkRefreshAll.Checked && string.IsNullOrEmpty(oldSaltInput))
            {
                ShowError(GetString("macros.refreshsecurityparams.oldsaltempty"));
                return;
            }
            if (!chkUseCurrentSalt.Checked && string.IsNullOrEmpty(newSaltInput))
            {
                ShowError(GetString("macros.refreshsecurityparams.newsaltempty"));
                return;
            }
            pnlAsyncLog.Visible = true;
            var objectTypes = Functions.GetObjectTypesWithMacros();
            RunAsync(EVENTLOG_SOURCE_REFRESHSECURITYPARAMS, p => RefreshSecurityParams(objectTypes, oldSaltInput, newSaltInput));
        };
    }

    /// <summary>
    /// Handles toggling of the "use current salt" checkbox.
    /// </summary>
    private void chkUseCurrentSalt_CheckedChanged(object sender, EventArgs args)
    {
        // Clear the textbox after enabling it
        if (!chkUseCurrentSalt.Checked)
        {
            txtNewSalt.Text = null;
        }
    }

    /// <summary>
    /// Appends a line to the asynchronous log control.
    /// </summary>
    private void AddLog(string logText)
    {
        ctlAsyncLog.AddLog(logText);
    }

    /// <summary>
    /// Refreshes the security parameters in macros for all the objects of the specified object types.
    /// Signs all the macros with the current user if the old salt is not specified.
    /// </summary>
    /// <param name="objectTypes">Object types</param>
    /// <param name="oldSalt">Old salt </param>
    /// <param name="newSalt">New salt</param>
    private void RefreshSecurityParams(IEnumerable<string> objectTypes, string oldSalt, string newSalt)
    {
        var oldSaltSpecified = !string.IsNullOrEmpty(oldSalt) && !chkRefreshAll.Checked;
        var newSaltSpecified = !string.IsNullOrEmpty(newSalt) && !chkUseCurrentSalt.Checked;

        processedObjects.Clear();

        using (var context = new CMSActionContext())
        {
            // Suppress per-object event logging and staging synchronization
            // while re-signing; failures are still logged explicitly through a
            // nested context below.
            context.LogEvents = false;
            context.LogSynchronization = false;

            var processingString = GetString("macros.refreshsecurityparams.processing");
            foreach (var objectType in objectTypes)
            {
                var niceObjectType = GetNiceObjectTypeName(objectType);
                AddLog(string.Format(processingString, niceObjectType));

                try
                {
                    var infos = new InfoObjectCollection(objectType);

                    var csi = infos.TypeInfo.ClassStructureInfo;

                    var orderByIndex = FindOrderByIndex(csi);
                    if (orderByIndex != null)
                    {
                        infos.OrderByColumns = orderByIndex.GetOrderBy();
                    }

                    // Page through large object sets instead of loading them
                    // all into memory at once.
                    infos.PageSize = 1000;

                    // Skip object types derived from general data class object type to avoid duplicities
                    if ((infos.TypeInfo.OriginalObjectType == DataClassInfo.OBJECT_TYPE) && (infos.TypeInfo.ObjectType != DataClassInfo.OBJECT_TYPE))
                    {
                        continue;
                    }

                    foreach (var info in infos)
                    {
                        try
                        {
                            bool refreshed;
                            if (oldSaltSpecified)
                            {
                                refreshed = MacroSecurityProcessor.RefreshSecurityParameters(info, oldSalt, newSaltSpecified ? newSalt : ValidationHelper.HashStringSalt, true);
                            }
                            else
                            {
                                var identityOption = MacroIdentityOption.FromUserInfo(MembershipContext.AuthenticatedUser);
                                if (chkRefreshAll.Checked && newSaltSpecified)
                                {
                                    // Do not check integrity, but use new salt
                                    refreshed = MacroSecurityProcessor.RefreshSecurityParameters(info, identityOption, true, newSalt);
                                }
                                else
                                {
                                    // Do not check integrity, sign everything with current user
                                    refreshed = MacroSecurityProcessor.RefreshSecurityParameters(info, identityOption, true);
                                }
                            }

                            if (refreshed)
                            {
                                var objectName = HTMLHelper.HTMLEncode(ResHelper.LocalizeString(info.Generalized.ObjectDisplayName));
                                processedObjects.Add(niceObjectType, objectName);
                            }
                        }
                        catch (Exception ex)
                        {
                            // Log the per-object failure and continue with the
                            // remaining objects.
                            string message = "Signing " + TypeHelper.GetNiceObjectTypeName(info.TypeInfo.ObjectType) + " " + info.Generalized.ObjectDisplayName + " failed: " + ex.Message;

                            using (var exceptionContext = new CMSActionContext())
                            {
                                exceptionContext.LogEvents = true;

                                EventLogProvider.LogEvent(EventType.ERROR, "Import", "MACROSECURITY", message);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Log the per-object-type failure and continue with the
                    // remaining object types.
                    AddLog(ex.Message);

                    using (var exceptionContext = new CMSActionContext())
                    {
                        exceptionContext.LogEvents = true;

                        EventLogProvider.LogException(EVENTLOG_SOURCE_REFRESHSECURITYPARAMS, "ERROR", ex);
                    }
                }
            }
        }

        EventLogProvider.LogEvent(EventType.INFORMATION, EVENTLOG_SOURCE_REFRESHSECURITYPARAMS, "PROCESSEDOBJECTS", GetProcessedObjectsForEventLog());
    }

    /// <summary>
    /// Finds suitable index for order by statement.
    /// </summary>
    private Index FindOrderByIndex(ClassStructureInfo classStructureInfo)
    {
        var indexes = classStructureInfo.GetTableIndexes();
        if (indexes == null)
        {
            return null;
        }

        // Clustered index has the best performance for paging but when not unique, stable result sets are not guaranteed over individual paging queries
        var clusteredIndex = indexes.GetClusteredIndex();
        if ((clusteredIndex != null) && clusteredIndex.IsUnique)
        {
            return clusteredIndex;
        }

        // Fall back to primary key index and then any index which is better than paging over non-indexed columns
        return indexes.GetPrimaryKeyIndex() ?? indexes.GetIndexes().FirstOrDefault();
    }

    /// <summary>
    /// Gets a human-readable name for the given object type. Falls back to the
    /// raw object type string when no resource string exists, with a special
    /// case for on-line form items (bizformitem.bizform.*), which use the
    /// form's display name.
    /// </summary>
    private static string GetNiceObjectTypeName(string objectType)
    {
        var objectTypeResourceKey = TypeHelper.GetObjectTypeResourceKey(objectType);
        var niceObjectType = GetString(objectTypeResourceKey);
        if (niceObjectType.Equals(objectTypeResourceKey, StringComparison.OrdinalIgnoreCase))
        {
            if (objectType.StartsWith("bizformitem.bizform.", StringComparison.OrdinalIgnoreCase))
            {
                DataClassInfo dci = DataClassInfoProvider.GetDataClassInfo(objectType.Substring("bizformitem.".Length));
                if (dci != null)
                {
                    niceObjectType = "on-line form " + ResHelper.LocalizeString(dci.ClassDisplayName);
                }
            }
            else
            {
                niceObjectType = objectType;
            }
        }
        return niceObjectType;
    }

    /// <summary>
    /// Gets the list of processed objects formatted for use in the event log.
    /// </summary>
    private string GetProcessedObjectsForEventLog()
    {
        return processedObjects.AllKeys.SelectMany(processedObjects.GetValues, (k, v) => string.Format("{0} '{1}'", k, v)).Join("<br />");
    }

    #endregion
}
| 35.092537 | 187 | 0.577152 | [
"MIT"
] | BryanSoltis/KenticoMVCWidgetShowcase | CMS/CMSModules/System/Macros/System_Macros.aspx.cs | 11,758 | C# |
namespace Papabytes.Cronofy.NetCore
{
using Requests;
using System;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// Builder for Sequence request.
/// </summary>
public sealed class SequenceRequestBuilder : IBuilder<SequencedAvailabilityRequest.SequenceRequest>
{
/// <summary>
/// The available periods for the request.
/// </summary>
private readonly IList<AvailabilityRequest.AvailablePeriod> availablePeriods =
new List<AvailabilityRequest.AvailablePeriod>();
/// <summary>
/// The subjects of the required participants for the request.
/// </summary>
private readonly IList<string> requiredSubs = new List<string>();
/// <summary>
/// The builders for participant groups for the request.
/// </summary>
private readonly IList<IBuilder<AvailabilityRequest.ParticipantGroup>> groupBuilders =
new List<IBuilder<AvailabilityRequest.ParticipantGroup>>();
/// <summary>
/// The groups for the request.
/// </summary>
private readonly IList<AvailabilityRequest.ParticipantGroup> groups =
new List<AvailabilityRequest.ParticipantGroup>();
/// <summary>
/// The required duration of the request.
/// </summary>
private int requiredDuration;
/// <summary>
/// The ordinal of the request.
/// </summary>
private int? ordinal;
/// <summary>
/// The start interval of the request.
/// </summary>
private int? startInterval;
/// <summary>
/// The after buffer of the request.
/// </summary>
private AvailabilityRequest.BufferDefintion afterBuffer;
/// <summary>
/// The before buffer of the request.
/// </summary>
private AvailabilityRequest.BufferDefintion beforeBuffer;
/// <summary>
/// The sequence id of the request.
/// </summary>
private string sequenceId;
/// <summary>
/// The event request.
/// </summary>
private UpsertEventRequest eventRequest;
/// <summary>
/// Sets the required duration of the request.
/// </summary>
/// <param name="minutes">
/// The number of minutes for the required duration, must be greater than zero.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="minutes"/> is not greater than zero.
/// </exception>
public SequenceRequestBuilder RequiredDuration(int minutes)
{
Preconditions.True(minutes > 0, "minutes must be greater than zero");
this.requiredDuration = minutes;
return this;
}
/// <summary>
/// Sets the start interval of the request.
/// </summary>
/// <param name="minutes">
/// The number of minutes for the start interval, must be greater than zero.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="minutes"/> is not greater than zero.
/// </exception>
public SequenceRequestBuilder StartInterval(int minutes)
{
Preconditions.True(minutes > 0, "minutes must be greater than zero");
this.startInterval = minutes;
return this;
}
/// <summary>
/// Sets the before buffer of the request.
/// </summary>
/// <param name="minimum">
/// The number of minutes for the minimum buffer.
/// </param>
/// <param name="maximum">
/// The number of minutes for the maximum buffer.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="minimum"/> is not null or is negative.
/// </exception>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="maximum"/> is not null or is negative.
/// </exception>
public SequenceRequestBuilder BeforeBuffer(int? minimum, int? maximum = null)
{
if(minimum.HasValue)
{
Preconditions.True(minimum.Value >= 0, "minimum buffer must be postive");
}
if(maximum.HasValue)
{
Preconditions.True(maximum.Value >= 0, "maximum buffer must be postive");
}
this.beforeBuffer = new AvailabilityRequest.BufferDefintion();
if(minimum.HasValue)
{
this.beforeBuffer.Minimum = new AvailabilityRequest.Duration
{
Minutes = minimum.Value
};
}
if(maximum.HasValue)
{
this.beforeBuffer.Maximum = new AvailabilityRequest.Duration
{
Minutes = maximum.Value
};
}
return this;
}
/// <summary>
/// Sets the after buffer of the request.
/// </summary>
/// <param name="minimum">
/// The number of minutes for the minimum buffer.
/// </param>
/// <param name="maximum">
/// The number of minutes for the maximum buffer.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="minimum"/> is not null or is negative.
/// </exception>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="maximum"/> is not null or is negative.
/// </exception>
public SequenceRequestBuilder AfterBuffer(int? minimum, int? maximum = null)
{
if(minimum.HasValue)
{
Preconditions.True(minimum.Value >= 0, "minimum buffer must be postive");
}
if(maximum.HasValue)
{
Preconditions.True(maximum.Value >= 0, "maximum buffer must be postive");
}
this.afterBuffer = new AvailabilityRequest.BufferDefintion();
if(minimum.HasValue)
{
this.afterBuffer.Minimum = new AvailabilityRequest.Duration
{
Minutes = minimum.Value
};
}
if(maximum.HasValue)
{
this.afterBuffer.Maximum = new AvailabilityRequest.Duration
{
Minutes = maximum.Value
};
}
return this;
}
/// <summary>
/// Sets the ordinal of the request.
/// </summary>
/// <param name="ordinal">
/// The ordinal for the request must be greater than zero.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="ordinal"/> is not greater than zero.
/// </exception>
public SequenceRequestBuilder Ordinal(int ordinal)
{
Preconditions.True(ordinal > 0, "ordinal must be greater than zero");
this.ordinal = ordinal;
return this;
}
/// <summary>
/// Sets the sequence id of the request.
/// </summary>
/// <param name="sequenceId">
/// The sequenceId for the request.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
public SequenceRequestBuilder SequenceId(string sequenceId)
{
this.sequenceId = sequenceId;
return this;
}
/// <summary>
/// Adds an available period to the request.
/// </summary>
/// <param name="start">
/// The start of the available period.
/// </param>
/// <param name="end">
/// The end of the available period.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
public SequenceRequestBuilder AddAvailablePeriod(DateTimeOffset start, DateTimeOffset end)
{
var period = new AvailabilityRequest.AvailablePeriod
{
Start = start,
End = end,
};
this.availablePeriods.Add(period);
return this;
}
/// <summary>
/// Adds a required participant to the request.
/// </summary>
/// <param name="sub">
/// The sub of the required participant, must not be blank.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="sub"/> is blank.
/// </exception>
public SequenceRequestBuilder AddRequiredParticipant(string sub)
{
Preconditions.NotBlank("sub", sub);
this.requiredSubs.Add(sub);
return this;
}
/// <summary>
/// Adds a participant group to the request.
/// </summary>
/// <param name="builder">
/// A builder for the group.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="builder"/> is <code>null</code>.
/// </exception>
public SequenceRequestBuilder AddParticipantGroup(IBuilder<AvailabilityRequest.ParticipantGroup> builder)
{
Preconditions.NotNull("builder", builder);
this.groupBuilders.Add(builder);
return this;
}
/// <summary>
/// Adds a participant group to the request.
/// </summary>
/// <param name="participantGroup">
/// A group.
/// </param>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="participantGroup"/> is <code>null</code>.
/// </exception>
public SequenceRequestBuilder AddParticipantGroup(AvailabilityRequest.ParticipantGroup participantGroup)
{
Preconditions.NotNull("participantGroup", participantGroup);
this.groups.Add(participantGroup);
return this;
}
/// <summary>
/// Sets the event on the builder.
/// </summary>
/// <returns>
/// A reference to the <see cref="SequenceRequestBuilder"/>.
/// </returns>
/// <exception cref="ArgumentException">
/// Thrown if <paramref name="eventRequest"/> is <code>null</code>.
/// </exception>
/// <param name="eventRequest">Event request.</param>
public SequenceRequestBuilder Event(UpsertEventRequest eventRequest)
{
Preconditions.NotNull("eventRequest", eventRequest);
this.eventRequest = eventRequest;
return this;
}
/// <inheritdoc />
public SequencedAvailabilityRequest.SequenceRequest Build()
{
var request = new SequencedAvailabilityRequest.SequenceRequest();
request.RequiredDuration = this.GetRequiredDuration();
request.AvailablePeriods = this.availablePeriods.ToArray();
request.Ordinal = this.ordinal;
request.SequenceId = this.sequenceId;
var participantGroups = new List<AvailabilityRequest.ParticipantGroup>();
if(this.requiredSubs.Count > 0)
{
var requiredGroup = new ParticipantGroupBuilder()
.AddMembers(this.requiredSubs)
.AllRequired();
participantGroups.Add(requiredGroup.Build());
}
if(this.eventRequest != null)
{
request.Event = this.eventRequest;
}
if(this.groups.Count > 0)
{
participantGroups.AddRange(this.groups);
}
if(this.groupBuilders.Count > 0)
{
participantGroups.AddRange(this.groupBuilders.Select(gb => gb.Build()));
}
if(this.beforeBuffer != null || this.afterBuffer != null)
{
request.Buffer = new AvailabilityRequest.Buffers();
if(this.beforeBuffer != null)
{
request.Buffer.Before = this.beforeBuffer;
}
if(this.afterBuffer != null)
{
request.Buffer.After = this.afterBuffer;
}
}
if(this.startInterval.HasValue)
{
request.StartInterval = new AvailabilityRequest.Duration
{
Minutes = this.startInterval.Value
};
}
request.Participants = participantGroups;
return request;
}
/// <summary>
/// Gets the required duration as an
/// <see cref="AvailabilityRequest.Duration"/>.
/// </summary>
/// <returns>
/// The required duration as an
/// <see cref="AvailabilityRequest.Duration"/>.
/// </returns>
private AvailabilityRequest.Duration GetRequiredDuration()
{
return new AvailabilityRequest.Duration { Minutes = this.requiredDuration };
}
}
} | 32.705747 | 113 | 0.536304 | [
"MIT"
] | Toky0/cronofy-csharp | src/Papabytes.Cronofy.NetCore/SequenceRequestBuilder.cs | 14,227 | C# |
using System;
using System.Collections.Generic;
using System.Text;
namespace binance.dex.sdk.model
{
public class BlockTrade
{
/*blockTime long timestamp of a block
fee string total fee collected
height long block height
trade[Trade]*/
}
}
| 17.2 | 37 | 0.736434 | [
"MIT"
] | AYCHEX/aex.cSDK | src/model/BlockTrade.cs | 260 | C# |
//
// Klak - Utilities for creative coding with Unity
//
// Copyright (C) 2016 Keijiro Takahashi
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
using UnityEngine;
using System.Reflection;
namespace Klak.Wiring
{
[AddComponentMenu("Transfer/Output/Renderer/Material Float Out")]
public class MaterialFloatOut : BlockBase
{
#region Editable properties
[SerializeField]
Renderer _target;
[SerializeField]
string _propertyName;
#endregion
#region Block I/O
[Inlet]
public float floatInput {
set {
if (!enabled || _target == null || _propertyID < 0) return;
_target.material.SetFloat(_propertyID, value);
}
}
#endregion
#region Private members
int _propertyID = -1;
void OnEnable()
{
if (!string.IsNullOrEmpty(_propertyName))
_propertyID = Shader.PropertyToID(_propertyName);
}
#endregion
}
}
| 30.835821 | 80 | 0.675702 | [
"MIT"
] | mrayy/Embodied-Driven-Design | Embodied-Driven Design/Assets/Libraries/Klak/Wiring/Output/MaterialFloatOut.cs | 2,066 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.18408
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SineWave.Properties
{
    // NOTE: this type is emitted by the Visual Studio settings designer; hand
    // edits will be overwritten on regeneration (see the file header).
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
    {
        // Singleton instance; ApplicationSettingsBase.Synchronized wraps it so
        // access is safe across threads.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));

        /// <summary>Gets the shared application settings instance.</summary>
        public static Settings Default
        {
            get
            {
                return defaultInstance;
            }
        }
    }
}
| 34.290323 | 151 | 0.580433 | [
"MIT"
] | jgvicke/ZedGraph_SineWave | Properties/Settings.Designer.cs | 1,065 | C# |
using System.Collections.Generic;
using TransactionSimulator.DataModels;
namespace TransactionSimulator.Services
{
    /// <summary>
    /// Reads the transactions that drive the simulator.
    /// </summary>
    internal interface ITransactionDataReader
    {
        /// <summary>
        /// Reads the available transactions from the underlying source.
        /// </summary>
        /// <returns>The sequence of transactions read.</returns>
        IEnumerable<Transaction> ReadTransactions();
    }
}
| 20.909091 | 52 | 0.773913 | [
"MIT"
] | JohnnyFayad/mldotnet-real-time-data-streaming-workshop | src/real-time-data-streaming/transaction-simulator/TransactionSimulator/TransactionSimulator/Services/ITransactionDataReader.cs | 232 | C# |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// <auto-generated/>
// Template Source: Templates\CSharp\Requests\IEntityRequestBuilder.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
    /// <summary>
    /// The interface IDeviceManagementPartnerRequestBuilder.
    /// Builds strongly typed requests for a deviceManagementPartner entity.
    /// </summary>
    public partial interface IDeviceManagementPartnerRequestBuilder : IEntityRequestBuilder
    {
        /// <summary>
        /// Builds the request.
        /// </summary>
        /// <returns>The built request.</returns>
        new IDeviceManagementPartnerRequest Request();

        /// <summary>
        /// Builds the request.
        /// </summary>
        /// <param name="options">The query and header options for the request.</param>
        /// <returns>The built request.</returns>
        new IDeviceManagementPartnerRequest Request(IEnumerable<Option> options);
    }
}
| 35.972222 | 153 | 0.5861 | [
"MIT"
] | OfficeGlobal/msgraph-beta-sdk-dotnet | src/Microsoft.Graph/Requests/Generated/IDeviceManagementPartnerRequestBuilder.cs | 1,295 | C# |
/*
// <copyright>
// dotNetRDF is free and open source software licensed under the MIT License
// -------------------------------------------------------------------------
//
// Copyright (c) 2009-2020 dotNetRDF Project (http://dotnetrdf.org/)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is furnished
// to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
*/
using System.Xml;
namespace VDS.RDF.Parsing
{
/// <summary>
/// Represents Position Information from Parsers.
/// </summary>
public class PositionInfo
{
private int _startLine, _endLine, _startPos, _endPos;
/// <summary>
/// Creates a new set of Position Information.
/// </summary>
/// <param name="line">Line.</param>
/// <param name="position">Column.</param>
public PositionInfo(int line, int position)
{
_startLine = _endLine = line;
_startPos = _endPos = position;
}
/// <summary>
/// Creates a new set of Position Information.
/// </summary>
/// <param name="line">Line.</param>
/// <param name="startPosition">Start Column.</param>
/// <param name="endPosition">End Column.</param>
public PositionInfo(int line, int startPosition, int endPosition)
: this(line, startPosition)
{
_endPos = endPosition;
}
/// <summary>
/// Creates a new set of Position Information.
/// </summary>
/// <param name="startLine">Start Line.</param>
/// <param name="endLine">End Line.</param>
/// <param name="startPosition">Start Column.</param>
/// <param name="endPosition">End Column.</param>
public PositionInfo(int startLine, int endLine, int startPosition, int endPosition)
: this(startLine, startPosition, endPosition)
{
_endLine = endLine;
}
/// <summary>
/// Creates a new set of Position Information form some XML Line Information.
/// </summary>
/// <param name="info">XML Line Information.</param>
public PositionInfo(IXmlLineInfo info)
: this(info.LineNumber, info.LinePosition) { }
/// <summary>
/// Gets the Start Line.
/// </summary>
public int StartLine
{
get
{
return _startLine;
}
}
/// <summary>
/// Gets the End Line.
/// </summary>
public int EndLine
{
get
{
return _endLine;
}
}
/// <summary>
/// Gets the Start Column.
/// </summary>
public int StartPosition
{
get
{
return _startPos;
}
}
/// <summary>
/// Gets the End Column.
/// </summary>
public int EndPosition
{
get
{
return _endPos;
}
}
}
}
| 32 | 91 | 0.565228 | [
"MIT"
] | blackwork/dotnetrdf | Libraries/dotNetRDF/Parsing/PositionInfo.cs | 4,032 | C# |
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Text;
using Newtonsoft.Json;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
// When Microsoft.CodeAnalysis.ISourceGenerator.Execute(Microsoft.CodeAnalysis.GeneratorExecutionContext) is invoked, a GeneratorExecutionContext is passed to the source generator.
namespace BlazorBoilerplate.SourceGenerator
{
    /// <summary>
    /// Source generator that emits, for each entity, an interface in
    /// BlazorBoilerplate.Shared.DataInterfaces and/or an entity class
    /// implementing that interface in BlazorBoilerplate.Shared (Dto.Db).
    /// </summary>
    [Generator]
    public class EntityGenerator : ISourceGenerator
    {
        /// <summary>
        /// Names of all entity classes discovered in the configured entities folder.
        /// </summary>
        private List<string> typeNames;
        /// <summary>
        /// Entry point invoked by the compiler: emits one generated source file per entity.
        /// </summary>
        public void Execute(GeneratorExecutionContext context)
        {
            // Load "EntityGeneratorConfig.json" (registered as an AdditionalFile).
            var config = GetConfig(context);
            if (config == null)
                return;
            // Parse the syntax trees of all entity .cs files.
            var syntaxTrees = ManualLoad(config.EntitiesPath);
            // Collect the class names of all entities (first class declaration per file).
            typeNames = syntaxTrees
                .Select(i => (ClassDeclarationSyntax)i.GetRoot().DescendantNodes().FirstOrDefault(x => x.IsKind(SyntaxKind.ClassDeclaration)))
                .Where(i => i != null)
                .Select(i => i.Identifier.ValueText)
                .ToList();
            foreach (SyntaxTree tree in syntaxTrees)
            {
                var typeText = GetTypeText(context, tree, config, out var fileName);
                // Skip files that contain no class declaration.
                if (typeText == null)
                    continue;
                context.AddSource(fileName, SourceText.From(typeText, Encoding.UTF8));
            }
        }
        /// <summary>
        /// No initialization required; the commented-out block below can be
        /// re-enabled to attach a debugger to the generator process.
        /// </summary>
        public void Initialize(GeneratorInitializationContext context)
        {
#if DEBUG
            //if (!Debugger.IsAttached)
            //{
            //    Debugger.Launch();
            //}
#endif
        }
        /// <summary>
        /// Builds the generated source text (interface and/or entity class) for one entity file.
        /// </summary>
        /// <param name="context">Unused.</param>
        /// <param name="file">Syntax tree of the entity's .cs file.</param>
        /// <param name="config">Code-generation settings.</param>
        /// <param name="fileName">Name of the file to generate (hint name for AddSource).</param>
        /// <returns>The generated source text, or null when the file has no class declaration.</returns>
        private string GetTypeText(GeneratorExecutionContext context, SyntaxTree file, EntityGeneratorConfig config, out string fileName)
        {
            string classText = null;
            fileName = null;
            var root = file.GetRoot();
            // Get the (first) class declaration.
            var classDeclaration = (ClassDeclarationSyntax)root.DescendantNodes().FirstOrDefault(x => x.IsKind(SyntaxKind.ClassDeclaration));
            if (classDeclaration == null)
                return null;
            // Collect the properties, excluding those marked [JsonIgnore].
            // NOTE(review): the nested lambda parameter reuses the name 'x' from the
            // enclosing lambda, which the compiler normally rejects (CS0136) — verify this builds.
            var properties = classDeclaration.Members.OfType<PropertyDeclarationSyntax>()
                .Where(x => !x.AttributeLists.SelectMany(x => x.Attributes).Any(a => a.Name.ToString() == "JsonIgnore"));
            // Buffer that accumulates the generated property source code.
            var propertiesSb = new StringBuilder();
            // The entity class name from its declaration.
            var typeName = classDeclaration.Identifier.ValueText;
            // Compile the tree with a reference to the core library so symbols resolve.
            var compilation = CSharpCompilation.Create(typeName)
                .AddReferences(MetadataReference.CreateFromFile(typeof(string).Assembly.Location))
                .AddSyntaxTrees(file);
            // If the entity implements IAuditable, add the auditing properties.
            if (classDeclaration.BaseList?.Types.Any(i => i.Type is IdentifierNameSyntax && ((IdentifierNameSyntax)i.Type).Identifier.ValueText == "IAuditable") == true)
            {
                if (config.GenEntities)
                    propertiesSb.Append(@"
public DateTime CreatedOn
{
    get { return GetValue<DateTime>(); }
    set { SetValue(value); }
}
public DateTime? ModifiedOn
{
    get { return GetValue<DateTime?>(); }
    set { SetValue(value); }
}
public ApplicationUser CreatedBy
{
    get { return GetValue<ApplicationUser>(); }
    set { SetValue(value); }
}
public Guid? CreatedById
{
    get { return GetValue<Guid?>(); }
    set { SetValue(value); }
}
public ApplicationUser ModifiedBy
{
    get { return GetValue<ApplicationUser>(); }
    set { SetValue(value); }
}
public Guid? ModifiedById
{
    get { return GetValue<Guid?>(); }
    set { SetValue(value); }
}
");
                // Explicit interface implementations when the entity implements the generated interface.
                if (config.GenEntities && config.EntityWithInterface)
                    propertiesSb.Append(@$"
IApplicationUser I{typeName}.CreatedBy {{ get => CreatedBy; set => CreatedBy = (ApplicationUser)value; }}
IApplicationUser I{typeName}.ModifiedBy {{ get => ModifiedBy; set => ModifiedBy = (ApplicationUser)value; }}
");
                // When generating interfaces, emit property declarations only.
                if (config.GenInterfaces)
                    propertiesSb.Append(@"
DateTime CreatedOn { get; set; }
DateTime? ModifiedOn { get; set; }
IApplicationUser CreatedBy { get; set; }
Guid? CreatedById { get; set; }
IApplicationUser ModifiedBy { get; set; }
Guid? ModifiedById { get; set; }
");
            }
            // Emit code for each discovered property.
            foreach (var pds in properties)
            {
                var property = compilation
                    .GetSemanticModel(pds.SyntaxTree)
                    .GetDeclaredSymbol(pds);
                // Skip non-public properties.
                if (property.DeclaredAccessibility != Accessibility.Public)
                    continue;
                var propertyType = property.Type as INamedTypeSymbol; // named type (null for arrays, pointers and type parameters)
                var setter = @"
        set { SetValue(value); }";
                // Resolve the type name; for arrays propertyType is null, so append "[]".
                string propertyTypeName = propertyType != null ? propertyType.Name : ((IArrayTypeSymbol)((IArrayTypeSymbol)property.Type).OriginalDefinition).ElementType.Name + "[]";
                if (propertyType?.IsGenericType == true) // generic type
                {
                    propertyTypeName = GetFirstGenericTypeName(pds, property);
                    if (propertyType.NullableAnnotation == NullableAnnotation.Annotated)
                        propertyTypeName = $"{propertyTypeName}?";
                    else if (propertyType.Name == "ICollection")
                    {
                        if (config.GenInterfaces)
                            propertiesSb.Append(@$"
ICollection<I{propertyTypeName}> {property.Name} {{ get; }}
");
                        if (config.GenEntities && config.EntityWithInterface)
                            propertiesSb.Append(@$"
ICollection<I{propertyTypeName}> I{typeName}.{property.Name} {{ get => {property.Name}.Select(i => (I{propertyTypeName})i).ToList(); }}
");
                        // Collections are emitted as NavigationSet<T> with no setter.
                        propertyTypeName = $"NavigationSet<{propertyTypeName}>";
                        setter = string.Empty;
                    }
                }
                else // non-generic type
                {
                    if (typeNames.Contains(propertyTypeName))
                    {
                        var iPropertyTypeName = $"I{propertyTypeName}";
                        if (config.GenEntities && config.EntityWithInterface)
                            propertiesSb.Append(@$"
{iPropertyTypeName} I{typeName}.{property.Name} {{ get => {property.Name}; set => {property.Name} = ({propertyTypeName})value; }}
");
                        if (config.GenInterfaces)
                            propertiesSb.Append(@$"
{iPropertyTypeName} {property.Name} {{ get; set; }}
");
                    }
                    else if (config.GenInterfaces)
                        propertiesSb.Append(@$"
{propertyTypeName} {property.Name} {{ get; set; }}
");
                }
                if (config.GenEntities && config.EntityWithInterface)
                    propertiesSb.Append(@$"
public {propertyTypeName} {property.Name}
{{
    get {{ return GetValue<{propertyTypeName}>(); }}{setter}
}}
");
                if (config.GenEntities && !config.EntityWithInterface)
                    propertiesSb.Append(@$"
public {propertyTypeName} {property.Name}
{{
    get {{ return GetValue<{propertyTypeName}>(); }}{setter}
}}
");
            }
            fileName = $"{typeName}.g.cs";
            if (config.GenInterfaces)
            {
                fileName = $"I{typeName}.g.cs";
                classText = @$"//Autogenerated by EntityGenerator
using BlazorBoilerplate.Constants;
using System;
using System.Collections.Generic;
using System.ComponentModel;
namespace BlazorBoilerplate.Shared.DataInterfaces
{{
    public interface I{typeName}
    {{{propertiesSb}
    }}
}}
";
            }
            // Entity that implements the generated interface.
            if (config.GenEntities && config.EntityWithInterface)
                classText = @$"//Autogenerated by EntityGenerator
using BlazorBoilerplate.Constants;
using BlazorBoilerplate.Shared.DataInterfaces;
using Breeze.Sharp;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
namespace BlazorBoilerplate.Shared.Dto.Db
{{
    public partial class {typeName} : BaseEntity, I{typeName}
    {{{propertiesSb}
    }}
}}
";
            // Entity without a generated interface.
            if (config.GenEntities && !config.EntityWithInterface)
                classText = @$"//Autogenerated by EntityGenerator
using BlazorBoilerplate.Constants;
using Breeze.Sharp;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
namespace BlazorBoilerplate.Shared.Dto.Db
{{
    public partial class {typeName} : BaseEntity
    {{{propertiesSb}
    }}
}}
";
            return classText;
        }
        /// <summary>
        /// Searches rootDir (including subdirectories) for .cs files and parses
        /// each into a Roslyn syntax tree.
        /// </summary>
        /// <param name="rootDir">Directory to search for .cs files.</param>
        /// <returns>The parsed syntax trees.</returns>
        private IEnumerable<SyntaxTree> ManualLoad(string rootDir)
        {
            // SearchOption.AllDirectories includes subdirectories.
            foreach (var filepath in Directory.GetFiles(rootDir, "*.cs", SearchOption.AllDirectories))
            {
                var file = File.ReadAllText(filepath);
                yield return CSharpSyntaxTree.ParseText(file);
            }
        }
        /// <summary>
        /// Loads the code-generation settings.
        /// </summary>
        /// <returns>The settings, or null when EntityGeneratorConfig.json is not registered as an additional file.</returns>
        private EntityGeneratorConfig GetConfig(GeneratorExecutionContext context)
        {
            var configFile = context.AdditionalFiles.FirstOrDefault(x => Path.GetFileName(x.Path) == "EntityGeneratorConfig.json");
            if (configFile == null)
                return null;
            var jsonString = File.ReadAllText(configFile.Path);
            var config = JsonConvert.DeserializeObject<EntityGeneratorConfig>(jsonString);
            // Normalize OS-specific path separators and resolve the entities path relative to the config file.
            config.EntitiesPath = CrossPlatform.PathCombine(Path.GetDirectoryName(configFile.Path), config.EntitiesPath.Split("\\".ToCharArray()));
            return config;
        }
        /// <summary>
        /// Resolves a display name for a type syntax node: unwraps nullable
        /// syntax, returns identifier names and predefined-type keywords,
        /// otherwise null.
        /// </summary>
        private static string BuildTypeName(TypeSyntax node, IPropertySymbol property)
        {
            if (node is NullableTypeSyntax nts)
                node = nts.ElementType;
            if (node is IdentifierNameSyntax ins)
            {
                var namedType = property.Type as INamedTypeSymbol;
                // NOTE(review): typeArg is unused except that First() throws when no
                // matching type argument exists — presumably an intentional sanity check; confirm.
                var typeArg = namedType!.TypeArguments
                    .First(x => x.Name == ins.Identifier.ValueText);
                return ins.Identifier.ValueText;
            }
            if (node is PredefinedTypeSyntax pts)
                return pts.Keyword.ValueText;
            return default;
        }
        /// <summary>
        /// Returns the name of the first generic type argument of the property's
        /// declared type, or of the type itself when it is not generic.
        /// </summary>
        static string GetFirstGenericTypeName(
            PropertyDeclarationSyntax pds,
            IPropertySymbol property)
        {
            var gns = pds.DescendantNodes()
                .OfType<GenericNameSyntax>();
            var type = pds.Type;
            if (gns.Any())
                type = gns.First().TypeArgumentList.Arguments.First();
            return BuildTypeName(type, property);
        }
    }
}
| 34.024129 | 182 | 0.57001 | [
"MIT"
] | genkokudo/BlizzardHaunt | src/Utils/BlazorBoilerplate.SourceGenerator/EntityGenerator.cs | 13,711 | C# |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Leetcode191-NumberOf1")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Leetcode191-NumberOf1")]
[assembly: AssemblyCopyright("Copyright © 2018")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("baea05d1-f5f6-4fc8-858e-77474d28f8c3")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| 38.108108 | 84 | 0.749645 | [
"MIT"
] | yanglr/LeetCodeOJ | csharp-leetcode/vs projects/leetcode191-NumberOf1/Leetcode191-NumberOf1/Properties/AssemblyInfo.cs | 1,413 | C# |
namespace SourceControlSystem.Data
{
using System;
using System.Data.Entity;
using System.Linq;
public class EfGenericRepository<T> : IRepository<T> where T : class
{
public EfGenericRepository(ISourceControlSystemDbContext context)
{
if (context == null)
{
throw new ArgumentException("An instance of DbContext is required to use this repository.", "context");
}
this.Context = context;
this.DbSet = this.Context.Set<T>();
}
protected IDbSet<T> DbSet { get; set; }
protected ISourceControlSystemDbContext Context { get; set; }
public virtual IQueryable<T> All()
{
return this.DbSet.AsQueryable();
}
public virtual T GetById(object id)
{
return this.DbSet.Find(id);
}
public virtual void Add(T entity)
{
var entry = this.Context.Entry(entity);
if (entry.State != EntityState.Detached)
{
entry.State = EntityState.Added;
}
else
{
this.DbSet.Add(entity);
}
}
public virtual void Update(T entity)
{
var entry = this.Context.Entry(entity);
if (entry.State == EntityState.Detached)
{
this.DbSet.Attach(entity);
}
entry.State = EntityState.Modified;
}
public virtual void Delete(T entity)
{
var entry = this.Context.Entry(entity);
if (entry.State != EntityState.Deleted)
{
entry.State = EntityState.Deleted;
}
else
{
this.DbSet.Attach(entity);
this.DbSet.Remove(entity);
}
}
public virtual void Delete(object id)
{
var entity = this.GetById(id);
if (entity != null)
{
this.Delete(entity);
}
}
public virtual T Attach(T entity)
{
return this.Context.Set<T>().Attach(entity);
}
public virtual void Detach(T entity)
{
var entry = this.Context.Entry(entity);
entry.State = EntityState.Detached;
}
public int SaveChanges()
{
return this.Context.SaveChanges();
}
public void Dispose()
{
this.Context.Dispose();
}
}
}
| 24.75 | 119 | 0.491841 | [
"MIT"
] | EmilMitev/Telerik-Academy | Single Page Applications/Exam/SourceControlSystem-Server/Data/SourceControlSystem.Data/EfGenericRepository.cs | 2,576 | C# |
using System.Collections.Generic;
namespace Kore.Plugins.Messaging.Forums.Models
{
public class ForumGroupModel
{
public ForumGroupModel()
{
this.Forums = new List<ForumRowModel>();
}
public int Id { get; set; }
public string Name { get; set; }
public string SeName { get; set; }
public IList<ForumRowModel> Forums { get; set; }
}
} | 21.85 | 57 | 0.562929 | [
"MIT"
] | artinite21/KoreCMS | Plugins/Kore.Plugins.Messaging.Forums/Models/ForumGroupModel.cs | 439 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using ZWave.Channel.Protocol;
namespace ZWave.CommandClasses
{
public class ThermostatModeSupportedValuesReport : NodeReport
{
public readonly ThermostatModeValue[] SupportedModes;
internal ThermostatModeSupportedValuesReport(Node node, byte[] payload) : base(node)
{
if (payload == null)
throw new ArgumentNullException(nameof(payload));
if (payload.Length < 1)
throw new ReponseFormatException($"The response was not in the expected format. {GetType().Name}: Payload: {BitConverter.ToString(payload)}");
var set = new HashSet<ThermostatModeValue>();
foreach (ThermostatModeValue value in Enum.GetValues(typeof(ThermostatModeValue)))
{
int bitIndex = (int)value;
int byteIndex = bitIndex / 8;
if (byteIndex >= payload.Length)
{
break;
}
byte currentByte = payload[byteIndex];
uint bit = 1U << (bitIndex % 8);
if ((bit & currentByte) == bit)
{
set.Add(value);
}
}
SupportedModes = set.ToArray();
}
public override string ToString()
{
return $"SupportedModes:{SupportedModes}";
}
}
}
| 30.3125 | 158 | 0.55189 | [
"MIT"
] | MiTheFreeman/ZWave4Net | ZWave/CommandClasses/ThermostatModeSupportedValuesReport.cs | 1,457 | C# |
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using EasyAbp.EShop.Plugins.Coupons.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Volo.Abp.Domain.Repositories.EntityFrameworkCore;
using Volo.Abp.EntityFrameworkCore;
using Volo.Abp.Timing;
namespace EasyAbp.EShop.Plugins.Coupons.Coupons
{
    /// <summary>
    /// EF Core repository for <see cref="Coupon"/> entities.
    /// </summary>
    public class CouponRepository : EfCoreRepository<ICouponsDbContext, Coupon, Guid>, ICouponRepository
    {
        public CouponRepository(IDbContextProvider<ICouponsDbContext> dbContextProvider) : base(dbContextProvider)
        {
        }
        /// <summary>
        /// Queries coupons that are currently usable: not yet expired and whose
        /// template's usable period has begun (or has no start time).
        /// </summary>
        public virtual IQueryable<Coupon> GetAvailableCouponQueryable(IClock clock)
        {
            var now = clock.Now;
            // NOTE(review): a coupon with a null ExpirationTime is excluded here
            // (the lifted comparison "null > now" is false), while InsertAsync below
            // treats a null ExpirationTime as "not expired" — confirm this asymmetry
            // is intended.
            return DbSet
                .Where(x => x.ExpirationTime > now)
                .Join(
                    DbContext.CouponTemplates.Where(x => !x.UsableBeginTime.HasValue || x.UsableBeginTime.Value <= now),
                    coupon => coupon.CouponTemplateId,
                    couponTemplate => couponTemplate.Id,
                    (coupon, couponTemplate) => coupon
                );
        }
        /// <summary>
        /// Inserts a coupon, enforcing the per-user cap on not-yet-expired coupons.
        /// </summary>
        /// <exception cref="UserCouponQuantityExceedsLimitException">
        /// Thrown when the user already holds the maximum number of unexpired coupons.
        /// </exception>
        public override async Task<Coupon> InsertAsync(Coupon entity, bool autoSave = false, CancellationToken cancellationToken = new CancellationToken())
        {
            var clock = ServiceProvider.GetRequiredService<IClock>();
            // Coupons with no expiration count as "not expired" for the cap.
            var notExpiredCouponQuantity = await (await GetDbSetAsync()).CountAsync(
                x => x.UserId == entity.UserId && (!x.ExpirationTime.HasValue || x.ExpirationTime.Value > clock.Now),
                cancellationToken);
            if (notExpiredCouponQuantity >= CouponsConsts.MaxNotExpiredCouponQuantityPerUser)
            {
                throw new UserCouponQuantityExceedsLimitException(CouponsConsts.MaxNotExpiredCouponQuantityPerUser);
            }
            return await base.InsertAsync(entity, autoSave, cancellationToken);
        }
    }
} | 40.28 | 155 | 0.666832 | [
"MIT"
] | EasyAbp/EShop | plugins/Coupons/src/EasyAbp.EShop.Plugins.Coupons.EntityFrameworkCore/EasyAbp/EShop/Plugins/Coupons/Coupons/CouponRepository.cs | 2,014 | C# |
using Microsoft.CodeAnalysis;
using System.Collections.Immutable;
using Microsoft.CodeAnalysis.CodeFixes;
using System.Threading.Tasks;
using System.Linq;
namespace RefactoringEssentials.CSharp.Diagnostics
{
[ExportCodeFixProvider(LanguageNames.CSharp), System.Composition.Shared]
public class UnusedAnonymousMethodSignatureCodeFixProvider : CodeFixProvider
{
public override ImmutableArray<string> FixableDiagnosticIds
{
get
{
return ImmutableArray.Create(CSharpDiagnosticIDs.UnusedAnonymousMethodSignatureAnalyzerID);
}
}
public override FixAllProvider GetFixAllProvider()
{
return WellKnownFixAllProviders.BatchFixer;
}
public async override Task RegisterCodeFixesAsync(CodeFixContext context)
{
var document = context.Document;
var cancellationToken = context.CancellationToken;
var span = context.Span;
var diagnostics = context.Diagnostics;
var root = await document.GetSyntaxRootAsync(cancellationToken);
var diagnostic = diagnostics.First();
var node = root.FindNode(context.Span);
//if (!node.IsKind(SyntaxKind.BaseList))
// continue;
var newRoot = root.RemoveNode(node, SyntaxRemoveOptions.KeepNoTrivia);
context.RegisterCodeFix(CodeActionFactory.Create(node.Span, diagnostic.Severity, "Remove redundant signature", document.WithSyntaxRoot(newRoot)), diagnostic);
}
}
} | 38.219512 | 170 | 0.683472 | [
"MIT"
] | Wagnerp/RefactoringEssentials | RefactoringEssentials/CSharp/Diagnostics/Synced/RedundanciesInCode/UnusedAnonymousMethodSignatureCodeFixProvider.cs | 1,567 | C# |
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using BlazorShared;
using Microsoft.Extensions.Diagnostics.HealthChecks;
using Microsoft.Extensions.Options;
namespace Microsoft.eShopWeb.Web.HealthChecks;
/// <summary>
/// Health check that probes the public catalog-items API endpoint and reports
/// healthy when the response contains a known catalog item name.
/// </summary>
public class ApiHealthCheck : IHealthCheck
{
    // A single shared HttpClient avoids socket exhaustion from creating a new
    // client (and connection pool) on every health probe.
    private static readonly HttpClient _httpClient = new HttpClient();

    private readonly BaseUrlConfiguration _baseUrlConfiguration;

    public ApiHealthCheck(IOptions<BaseUrlConfiguration> baseUrlConfiguration)
    {
        _baseUrlConfiguration = baseUrlConfiguration.Value;
    }

    /// <summary>
    /// Performs the probe; honors <paramref name="cancellationToken"/> for the HTTP call.
    /// </summary>
    public async Task<HealthCheckResult> CheckHealthAsync(
        HealthCheckContext context,
        CancellationToken cancellationToken = default(CancellationToken))
    {
        string myUrl = _baseUrlConfiguration.ApiBase + "catalog-items";
        // Dispose the response so the connection is returned to the pool promptly.
        using var response = await _httpClient.GetAsync(myUrl, cancellationToken);
        var pageContents = await response.Content.ReadAsStringAsync();
        if (pageContents.Contains(".NET Bot Black Sweatshirt"))
        {
            return HealthCheckResult.Healthy("The check indicates a healthy result.");
        }
        return HealthCheckResult.Unhealthy("The check indicates an unhealthy result.");
    }
}
| 33.628571 | 87 | 0.736619 | [
"MIT"
] | 10088/eShopOnWeb | src/Web/HealthChecks/ApiHealthCheck.cs | 1,179 | C# |
// <copyright file="Server.cs" company="Mozilla">
// This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, you can obtain one at http://mozilla.org/MPL/2.0/.
// </copyright>
using System.Collections.Generic;
using Newtonsoft.Json;
namespace FirefoxPrivateNetwork.JSONStructures
{
    /// <summary>
    /// Server object containing VPN server endpoint info.
    /// </summary>
    /// <remarks>
    /// Deserialized from the service's JSON via the <c>JsonProperty</c> mappings below.
    /// </remarks>
    public class Server
    {
        /// <summary>
        /// Gets or sets the server's hostname.
        /// </summary>
        [JsonProperty("hostname")]
        public string Hostname { get; set; }
        /// <summary>
        /// Gets or sets the public facing IPv4 endpoint address of the server.
        /// </summary>
        [JsonProperty("ipv4_addr_in")]
        public string IPv4EndpointAddress { get; set; }
        /// <summary>
        /// Gets or sets the weight of the server, indicating load and preferability when in the process of picking random servers.
        /// </summary>
        [JsonProperty("weight")]
        public int Weight { get; set; }
        /// <summary>
        /// Gets or sets a value indicating whether to include this server as part of a country.
        /// </summary>
        [JsonProperty("include_in_country")]
        public bool IncludeInCountry { get; set; }
        /// <summary>
        /// Gets or sets the public key of the server.
        /// </summary>
        [JsonProperty("public_key")]
        public string PublicKey { get; set; }
        /// <summary>
        /// Gets or sets the port ranges available with this server.
        /// </summary>
        /// <remarks>
        /// Each inner list is presumably a [start, end] port pair — confirm against the VPN API contract.
        /// </remarks>
        [JsonProperty("port_ranges")]
        public List<List<int>> PortRanges { get; set; }
        /// <summary>
        /// Gets or sets the IPv4 gateway/DNS server when connecting to this server.
        /// </summary>
        [JsonProperty("ipv4_gateway")]
        public string IPv4Gateway { get; set; }
        /// <summary>
        /// Gets or sets the IPv6 gateway/DNS server when connecting to this server.
        /// </summary>
        [JsonProperty("ipv6_gateway")]
        public string IPv6Gateway { get; set; }
    }
}
| 34.65625 | 195 | 0.600541 | [
"MPL-2.0"
] | Callek/guardian-vpn-windows | ui/src/JSONStructures/Server/Server.cs | 2,220 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.42000
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace AutoCheckInOut.Properties {
using System;
    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {
        
        private static global::System.Resources.ResourceManager resourceMan;
        
        private static global::System.Globalization.CultureInfo resourceCulture;
        
        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }
        
        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("AutoCheckInOut.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }
        
        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
}
| 38.609375 | 180 | 0.591663 | [
"MIT"
] | n696395/AutoCheckInOut | AutoCheckIn/Properties/Resources.Designer.cs | 2,853 | C# |
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Neo.Cryptography;
using System.Text;
namespace Neo.UnitTests.Cryptography
{
[TestClass]
public class UT_Murmur128
{
[TestMethod]
public void TestGetHashSize()
{
Murmur128 murmur128 = new Murmur128(1);
murmur128.HashSize.Should().Be(128);
}
[TestMethod]
public void TestHashCore()
{
byte[] array = Encoding.ASCII.GetBytes("hello");
array.Murmur128(123u).ToHexString().ToString().Should().Be("0bc59d0ad25fde2982ed65af61227a0e");
array = Encoding.ASCII.GetBytes("world");
array.Murmur128(123u).ToHexString().ToString().Should().Be("3d3810fed480472bd214a14023bb407f");
array = Encoding.ASCII.GetBytes("hello world");
array.Murmur128(123u).ToHexString().ToString().Should().Be("e0a0632d4f51302c55e3b3e48d28795d");
array = "718f952132679baa9c5c2aa0d329fd2a".HexToBytes();
array.Murmur128(123u).ToHexString().ToString().Should().Be("9b4aa747ff0cf4e41b3d96251551c8ae");
}
}
}
| 33 | 107 | 0.649351 | [
"MIT"
] | Ashuaidehao/neo | tests/neo.UnitTests/Cryptography/UT_Murmur128.cs | 1,155 | C# |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using JetBrains.Annotations;
using Microsoft.EntityFrameworkCore.Metadata.Conventions.Internal;
using Microsoft.EntityFrameworkCore.Storage;
using Microsoft.EntityFrameworkCore.Storage.Internal;
using Microsoft.EntityFrameworkCore.Utilities;
namespace Microsoft.EntityFrameworkCore.Metadata.Conventions
{
    /// <summary>
    /// Builds the SQL Server-specific model convention set on top of the
    /// relational defaults.
    /// </summary>
    public class SqlServerConventionSetBuilder : RelationalConventionSetBuilder
    {
        private readonly ISqlGenerationHelper _sqlGenerationHelper;
        public SqlServerConventionSetBuilder(
            [NotNull] RelationalConventionSetBuilderDependencies dependencies,
            [NotNull] ISqlGenerationHelper sqlGenerationHelper)
            : base(dependencies)
        {
            _sqlGenerationHelper = sqlGenerationHelper;
        }
        /// <summary>
        /// Registers SQL Server conventions, replacing the relational value-generator
        /// conventions with the SQL Server variants and adding memory-optimized-table
        /// and index conventions at the relevant model-change hooks.
        /// </summary>
        public override ConventionSet AddConventions(ConventionSet conventionSet)
        {
            Check.NotNull(conventionSet, nameof(conventionSet));
            base.AddConventions(conventionSet);
            // Value generation strategy runs once when the model is initialized.
            var valueGenerationStrategyConvention = new SqlServerValueGenerationStrategyConvention();
            conventionSet.ModelInitializedConventions.Add(valueGenerationStrategyConvention);
            // Swap in the SQL Server value-generator convention wherever the
            // relational one was registered.
            ValueGeneratorConvention valueGeneratorConvention = new SqlServerValueGeneratorConvention();
            ReplaceConvention(conventionSet.BaseEntityTypeChangedConventions, valueGeneratorConvention);
            var sqlServerInMemoryTablesConvention = new SqlServerMemoryOptimizedTablesConvention();
            conventionSet.EntityTypeAnnotationChangedConventions.Add(sqlServerInMemoryTablesConvention);
            ReplaceConvention(conventionSet.PrimaryKeyChangedConventions, valueGeneratorConvention);
            conventionSet.KeyAddedConventions.Add(sqlServerInMemoryTablesConvention);
            ReplaceConvention(conventionSet.ForeignKeyAddedConventions, valueGeneratorConvention);
            ReplaceConvention(conventionSet.ForeignKeyRemovedConventions, valueGeneratorConvention);
            // Index convention reacts to index, nullability and annotation changes.
            var sqlServerIndexConvention = new SqlServerIndexConvention(_sqlGenerationHelper);
            conventionSet.IndexAddedConventions.Add(sqlServerInMemoryTablesConvention);
            conventionSet.IndexAddedConventions.Add(sqlServerIndexConvention);
            conventionSet.IndexUniquenessChangedConventions.Add(sqlServerIndexConvention);
            conventionSet.IndexAnnotationChangedConventions.Add(sqlServerIndexConvention);
            conventionSet.PropertyNullabilityChangedConventions.Add(sqlServerIndexConvention);
            conventionSet.PropertyAnnotationChangedConventions.Add(sqlServerIndexConvention);
            conventionSet.PropertyAnnotationChangedConventions.Add((SqlServerValueGeneratorConvention)valueGeneratorConvention);
            return conventionSet;
        }
        /// <summary>
        /// Creates a complete SQL Server convention set (core + relational + SQL
        /// Server) without requiring a service provider.
        /// </summary>
        public static ConventionSet Build()
        {
            var sqlServerTypeMapper = new SqlServerTypeMapper(new RelationalTypeMapperDependencies());
            return new SqlServerConventionSetBuilder(
                new RelationalConventionSetBuilderDependencies(sqlServerTypeMapper, null, null),
                new SqlServerSqlGenerationHelper(new RelationalSqlGenerationHelperDependencies()))
                .AddConventions(
                    new CoreConventionSetBuilder(
                        new CoreConventionSetBuilderDependencies(sqlServerTypeMapper))
                        .CreateConventionSet());
        }
    }
}
| 46.87013 | 128 | 0.751455 | [
"Apache-2.0"
] | vadzimpm/EntityFramework | src/EFCore.SqlServer/Metadata/Conventions/SqlServerConventionSetBuilder.cs | 3,609 | C# |
using Microsoft.EntityFrameworkCore.Migrations;
using System;
using System.Collections.Generic;
namespace aspnetcoreUseMySqlEFCore.Migrations
{
    /// <summary>
    /// EF Core migration that creates the "DataSet" table: an auto-incrementing
    /// int Id primary key (via the MySQL auto-increment annotation) and a
    /// nullable string Annotation column.
    /// </summary>
    public partial class test : Migration
    {
        /// <summary>
        /// Applies the migration: creates the DataSet table.
        /// </summary>
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.CreateTable(
                name: "DataSet",
                columns: table => new
                {
                    // MySQL-specific annotation marks Id as auto-increment.
                    Id = table.Column<int>(nullable: false)
                        .Annotation("MySQL:AutoIncrement", true),
                    Annotation = table.Column<string>(nullable: true)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_DataSet", x => x.Id);
                });
        }
        /// <summary>
        /// Reverts the migration: drops the DataSet table.
        /// </summary>
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable(
                name: "DataSet");
        }
    }
}
| 29.5 | 71 | 0.532839 | [
"MIT"
] | yiyungent/demos | netcore/aspnetcoreUseMySqlEFCore/aspnetcoreUseMySqlEFCore/Migrations/20180822162556_test.cs | 946 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Microsoft.Win32.SafeHandles;
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
    internal static partial class Advapi32
    {
        /// <summary>
        /// P/Invoke declaration for the Win32 QueryServiceConfigW function, which copies the
        /// configuration parameters of a service into a caller-supplied buffer.
        /// </summary>
        /// <param name="serviceHandle">Handle to the service whose configuration is queried.</param>
        /// <param name="queryServiceConfigPtr">
        /// Pointer to the buffer that receives a QUERY_SERVICE_CONFIG structure
        /// (per the Win32 docs, may be zero when only probing for the required size).
        /// </param>
        /// <param name="bufferSize">Size, in bytes, of the buffer pointed to by <paramref name="queryServiceConfigPtr"/>.</param>
        /// <param name="bytesNeeded">Receives the number of bytes required to hold the full configuration.</param>
        /// <returns>
        /// <c>true</c> on success; on failure the Win32 error code is available via
        /// Marshal.GetLastWin32Error (SetLastError = true).
        /// </returns>
        [GeneratedDllImport(Libraries.Advapi32, EntryPoint = "QueryServiceConfigW", SetLastError = true)]
        internal static partial bool QueryServiceConfig(SafeServiceHandle serviceHandle, IntPtr queryServiceConfigPtr, int bufferSize, out int bytesNeeded);
    }
}
| 34.529412 | 156 | 0.776831 | [
"MIT"
] | MatthewJohn/runtime | src/libraries/Common/src/Interop/Windows/Advapi32/Interop.QueryServiceConfig.cs | 587 | C# |
/*
* Copyright 2012-2017 The Pkcs11Interop Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Written for the Pkcs11Interop project by:
* Jaroslav IMRICH <[email protected]>
*/
using System.IO;
using Net.Pkcs11Interop.Common;
using Net.Pkcs11Interop.HighLevelAPI41;
using NUnit.Framework;
namespace Net.Pkcs11Interop.Tests.HighLevelAPI41
{
/// <summary>
/// Digesting tests.
/// </summary>
[TestFixture()]
public class _12_DigestTest
{
/// <summary>
/// Single-part digesting test.
/// </summary>
[Test()]
public void _01_DigestSinglePartTest()
{
Helpers.CheckPlatform();
using (Pkcs11 pkcs11 = new Pkcs11(Settings.Pkcs11LibraryPath, Settings.AppType))
{
// Find first slot with token present
Slot slot = Helpers.GetUsableSlot(pkcs11);
// Open RO session
using (Session session = slot.OpenSession(SessionType.ReadOnly))
{
// Specify digesting mechanism
Mechanism mechanism = new Mechanism(CKM.CKM_SHA_1);
byte[] sourceData = ConvertUtils.Utf8StringToBytes("Hello world");
// Digest data
byte[] digest = session.Digest(mechanism, sourceData);
// Do something interesting with digest value
Assert.IsTrue(ConvertUtils.BytesToBase64String(digest) == "e1AsOh9IyGCa4hLN+2Od7jlnP14=");
}
}
}
/// <summary>
/// Multi-part digesting test.
/// </summary>
[Test()]
public void _02_DigestMultiPartTest()
{
Helpers.CheckPlatform();
using (Pkcs11 pkcs11 = new Pkcs11(Settings.Pkcs11LibraryPath, Settings.AppType))
{
// Find first slot with token present
Slot slot = Helpers.GetUsableSlot(pkcs11);
// Open RO session
using (Session session = slot.OpenSession(SessionType.ReadOnly))
{
// Specify digesting mechanism
Mechanism mechanism = new Mechanism(CKM.CKM_SHA_1);
byte[] sourceData = ConvertUtils.Utf8StringToBytes("Hello world");
byte[] digest = null;
// Multipart digesting can be used i.e. for digesting of streamed data
using (MemoryStream inputStream = new MemoryStream(sourceData))
{
// Digest data
digest = session.Digest(mechanism, inputStream);
}
// Do something interesting with digest value
Assert.IsTrue(ConvertUtils.BytesToBase64String(digest) == "e1AsOh9IyGCa4hLN+2Od7jlnP14=");
}
}
}
/// <summary>
/// DigestKey test.
/// </summary>
[Test()]
public void _03_DigestKeyTest()
{
Helpers.CheckPlatform();
using (Pkcs11 pkcs11 = new Pkcs11(Settings.Pkcs11LibraryPath, Settings.AppType))
{
// Find first slot with token present
Slot slot = Helpers.GetUsableSlot(pkcs11);
// Open RW session
using (Session session = slot.OpenSession(SessionType.ReadWrite))
{
// Login as normal user
session.Login(CKU.CKU_USER, Settings.NormalUserPin);
// Generate symetric key
ObjectHandle generatedKey = Helpers.GenerateKey(session);
// Specify digesting mechanism
Mechanism mechanism = new Mechanism(CKM.CKM_SHA_1);
// Digest key
byte[] digest = session.DigestKey(mechanism, generatedKey);
// Do something interesting with digest value
Assert.IsNotNull(digest);
session.DestroyObject(generatedKey);
session.Logout();
}
}
}
}
}
| 35.57554 | 110 | 0.529221 | [
"Apache-2.0"
] | arkkadin/pkcs11Interop | src/Pkcs11Interop/Pkcs11InteropTests/HighLevelAPI41/_12_DigestTest.cs | 4,945 | C# |
using System;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("AWSSDK.Pricing")]
#if BCL35
[assembly: AssemblyDescription("The Amazon Web Services SDK for .NET (3.5) - AWS Price List Service. We launched new service, Price List Service.")]
#elif BCL45
[assembly: AssemblyDescription("The Amazon Web Services SDK for .NET (4.5) - AWS Price List Service. We launched new service, Price List Service.")]
#elif NETSTANDARD20
[assembly: AssemblyDescription("The Amazon Web Services SDK for .NET (NetStandard 2.0) - AWS Price List Service. We launched new service, Price List Service.")]
#elif NETCOREAPP3_1
[assembly: AssemblyDescription("The Amazon Web Services SDK for .NET (.NET Core 3.1) - AWS Price List Service. We launched new service, Price List Service.")]
#else
#error Unknown platform constant - unable to set correct AssemblyDescription
#endif
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyProduct("Amazon Web Services SDK for .NET")]
[assembly: AssemblyCompany("Amazon.com, Inc")]
[assembly: AssemblyCopyright("Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("3.3")]
[assembly: AssemblyFileVersion("3.7.1.99")]
[assembly: System.CLSCompliant(true)]
#if BCL
[assembly: System.Security.AllowPartiallyTrustedCallers]
#endif | 43.058824 | 160 | 0.758652 | [
"Apache-2.0"
] | EbstaLimited/aws-sdk-net | sdk/src/Services/Pricing/Properties/AssemblyInfo.cs | 2,196 | C# |
using LanguageExt;
using System;
using System.Linq;
using System.Reactive.Concurrency;
using System.Reactive.Linq;
using static LanguageExt.Prelude;
namespace Echo
{
    /// <summary>
    /// Process: publish / subscribe functions.
    /// Lets a process broadcast messages and state updates to subscribers, and lets
    /// callers observe the publish/state streams of other processes.
    /// </summary>
    public static partial class Process
    {
        /// <summary>
        /// Publish a message for any listening subscribers
        /// </summary>
        /// <remarks>
        /// This should be used from within a process' message loop only
        /// </remarks>
        /// <param name="message">Message to publish</param>
        public static Unit publish<T>(T message) =>
            InMessageLoop
                ? ActorContext.Publish(message)
                : raiseUseInMsgLoopOnlyException<Unit>(nameof(publish));

        /// <summary>
        /// Publish a message for any listening subscribers, delayed.
        /// </summary>
        /// <remarks>
        /// This should be used from within a process' message loop only
        /// </remarks>
        /// <param name="message">Message to publish</param>
        /// <param name="delayFor">How long to delay sending for</param>
        /// <returns>IDisposable that you can use to cancel the operation if necessary. You do not need to call Dispose
        /// for any other reason.</returns>
        public static IDisposable publish<T>(T message, TimeSpan delayFor) =>
            InMessageLoop
                ? safedelay(() => ActorContext.Publish(message), delayFor)
                : raiseUseInMsgLoopOnlyException<IDisposable>(nameof(publish));

        /// <summary>
        /// Publish a message for any listening subscribers, delayed.
        /// </summary>
        /// <remarks>
        /// This should be used from within a process' message loop only
        /// This will fail to be accurate across a Daylight Saving Time boundary
        /// </remarks>
        /// <param name="message">Message to publish</param>
        /// <param name="delayUntil">When to send</param>
        /// <returns>IDisposable that you can use to cancel the operation if necessary. You do not need to call Dispose
        /// for any other reason.</returns>
        public static IDisposable publish<T>(T message, DateTime delayUntil) =>
            InMessageLoop
                ? safedelay(() => ActorContext.Publish(message), delayUntil)
                : raiseUseInMsgLoopOnlyException<IDisposable>(nameof(publish));

        /// <summary>
        /// Subscribes our inbox to another process publish stream. When it calls 'publish' it will
        /// arrive in our inbox.
        /// </summary>
        /// <param name="pid">Process to subscribe to</param>
        /// <remarks>
        /// This should be used from within a process' message loop only
        /// </remarks>
        /// <returns>IDisposable, call IDispose to end the subscription</returns>
        public static Unit subscribe(ProcessId pid)
        {
            // Capture Self eagerly: the subscription callback may fire outside this
            // process' message loop, where Self would no longer refer to this process.
            var savedSelf = Self;
            return InMessageLoop
                ? ActorContext.Request.Self.Actor.AddSubscription(pid, ActorContext.System(pid).Observe<object>(pid).Subscribe(x => tell(savedSelf, x, pid)))
                : raiseUseInMsgLoopOnlyException<Unit>(nameof(subscribe));
        }

        /// <summary>
        /// Unsubscribe from a process's publications
        /// </summary>
        /// <param name="pid">Process to unsub from</param>
        public static Unit unsubscribe(ProcessId pid) =>
            InMessageLoop
                ? ActorContext.Request.Self.Actor.RemoveSubscription(pid)
                : raiseUseInMsgLoopOnlyException<Unit>(nameof(unsubscribe));

        /// <summary>
        /// Subscribe to the process publish stream. When a process calls 'publish' it emits
        /// messages that can be consumed using this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        /// <returns>IDisposable, call IDispose to end the subscription</returns>
        public static IDisposable subscribe<T>(ProcessId pid, IObserver<T> observer) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IDisposable>(nameof(subscribe))
                : observe<T>(pid).Subscribe(observer);

        /// <summary>
        /// Subscribe to the process publish stream. When a process calls 'publish' it emits
        /// messages that can be consumed using this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        /// <returns>IDisposable, call IDispose to end the subscription</returns>
        public static IDisposable subscribe<T>(ProcessId pid, Action<T> onNext, Action<Exception> onError, Action onComplete) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IDisposable>(nameof(subscribe))
                : observe<T>(pid).Subscribe(onNext, onError, onComplete);

        /// <summary>
        /// Subscribe to the process publish stream. When a process calls 'publish' it emits
        /// messages that can be consumed using this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        public static IDisposable subscribe<T>(ProcessId pid, Action<T> onNext, Action<Exception> onError) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IDisposable>(nameof(subscribe))
                : observe<T>(pid).Subscribe(onNext, onError, () => { });

        /// <summary>
        /// Subscribe to the process publish stream. When a process calls 'publish' it emits
        /// messages that can be consumed using this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        public static IDisposable subscribe<T>(ProcessId pid, Action<T> onNext) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IDisposable>(nameof(subscribe))
                : observe<T>(pid).Subscribe(onNext, ex => { }, () => { });

        /// <summary>
        /// Subscribe to the process publish stream. When a process calls 'publish' it emits
        /// messages that can be consumed using this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        /// <returns>IDisposable, call IDispose to end the subscription</returns>
        public static IDisposable subscribe<T>(ProcessId pid, Action<T> onNext, Action onComplete) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IDisposable>(nameof(subscribe))
                : observe<T>(pid).Subscribe(onNext, ex => { }, onComplete);

        /// <summary>
        /// Get an IObservable for a process publish stream. When a process calls 'publish' it emits
        /// messages on the observable returned by this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        /// <returns>IObservable T</returns>
        public static IObservable<T> observe<T>(ProcessId pid) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IObservable<T>>(nameof(observe))
                : observeUnsafe<T>(pid);

        /// <summary>
        /// Get an IObservable for a process publish stream. When a process calls 'publish' it emits
        /// messages on the observable returned by this method.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, that's why this
        /// function is labelled `Unsafe`. Careful disposing and capture of free variables is required to not
        /// break the principles of actors.
        /// </remarks>
        /// <returns>IObservable T</returns>
        public static IObservable<T> observeUnsafe<T>(ProcessId pid) =>
            ActorContext.System(pid).Observe<T>(pid);

        /// <summary>
        /// Get an IObservable for a process's state stream. When a process state updates at the end of its
        /// message loop it announces it on the stream returned from this method. You should use this for
        /// notification only. Never modify the state object belonging to a process. Best practice is to make
        /// the state type immutable.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, therefore
        /// you can't call it from within a process message loop.
        /// </remarks>
        /// <returns>IObservable T</returns>
        public static IObservable<T> observeState<T>(ProcessId pid) =>
            InMessageLoop
                ? raiseDontUseInMessageLoopException<IObservable<T>>(nameof(observeState))
                : observeStateUnsafe<T>(pid);

        /// <summary>
        /// Get an IObservable for a process's state stream. When a process state updates at the end of its
        /// message loop it announces it on the stream returned from this method. You should use this for
        /// notification only. Never modify the state object belonging to a process. Best practice is to make
        /// the state type immutable.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        ///
        /// Because this call is asynchronous it could allow access to the message loop, that's why this
        /// function is labelled `Unsafe`. Careful disposing and capture of free variables is required to not
        /// break the principles of actors.
        /// </remarks>
        /// <returns>IObservable T</returns>
        public static IObservable<T> observeStateUnsafe<T>(ProcessId pid) =>
            ActorContext.System(pid).ObserveState<T>(pid);

        /// <summary>
        /// Subscribes our inbox to another process state publish stream.
        /// When a process state updates at the end of its message loop it announces it arrives in our inbox.
        /// You should use this for notification only. Never modify the state object belonging to a process.
        /// Best practice is to make the state type immutable.
        /// </summary>
        /// <remarks>
        /// The process can publish any number of types, any published messages not of type T will be ignored.
        /// This should be used from within a process' message loop only
        /// </remarks>
        /// <returns></returns>
        public static Unit subscribeState<T>(ProcessId pid)
        {
            // Capture Self eagerly: the state callback may fire outside this
            // process' message loop, where Self would no longer refer to this process.
            var savedSelf = Self;
            return InMessageLoop
                ? ActorContext.Request.Self.Actor.AddSubscription(
                    pid,
                    ActorContext.System(pid).ObserveState<T>(pid).Subscribe(x => tell(savedSelf, x, pid)))
                : raiseUseInMsgLoopOnlyException<Unit>(nameof(subscribeState));
        }
    }
}
| 51.287449 | 157 | 0.620066 | [
"MIT"
] | OlduwanSteve/echo-process | Echo.Process/Prelude_PubSub.cs | 12,670 | C# |
#if UNITY_EDITOR
using UnityEngine;
using NUnit.Framework;
using UnityEditor.TestTools.TestRunner.Api;
namespace Miscreant.Lifecycle.RuntimeTests
{
using TestUtils;
/// <summary>
/// Editor-only class. Logs a list of all PLAY mode tests each time any play mode test is run in the editor.
/// </summary>
[SetUpFixture]
public sealed class TestLogger_PlayMode
{
[OneTimeSetUp]
public void OneTimeSetUp()
{
var api = ScriptableObject.CreateInstance<TestRunnerApi>();
api.RetrieveTestList(TestMode.PlayMode, (testRoot) =>
{
TestLogger.SaveToDisk(
testRoot,
Application.dataPath.Substring(0, Application.dataPath.LastIndexOf("Assets")) + "Packages/Lifecycle/Tests",
"TestLog-PlayMode.txt"
);
});
}
}
}
#endif | 23.060606 | 112 | 0.718791 | [
"MIT"
] | Bunderant/unity-miscreant-lifecycle | Tests/Runtime/Scripts/TestLogger_PlayMode.cs | 763 | C# |
using System;
using HotChocolate.Language;
using HotChocolate.Types.Descriptors;
using HotChocolate.Types.Filters.Conventions;
namespace HotChocolate.Types.Filters
{
public class ObjectFilterOperationDescriptor<TObject>
: ObjectFilterOperationDescriptor
, IObjectFilterOperationDescriptor<TObject>
{
private readonly ObjectFilterFieldDescriptor<TObject> _descriptor;
protected ObjectFilterOperationDescriptor(
IDescriptorContext context,
ObjectFilterFieldDescriptor<TObject> descriptor,
NameString name,
ITypeReference type,
FilterOperation operation,
IFilterConvention filterConventions)
: base(context, descriptor, name, type, operation, filterConventions)
{
_descriptor = descriptor ?? throw new ArgumentNullException(nameof(descriptor));
}
/// <inheritdoc/>
public new IObjectFilterFieldDescriptor<TObject> And() => _descriptor;
/// <inheritdoc/>
public new IObjectFilterOperationDescriptor<TObject> Name(NameString value)
{
base.Name(value);
return this;
}
/// <inheritdoc/>
public new IObjectFilterOperationDescriptor<TObject> Description(
string value)
{
base.Description(value);
return this;
}
/// <inheritdoc/>
public new IObjectFilterOperationDescriptor<TObject> Directive<T>(
T directiveInstance)
where T : class
{
base.Directive(directiveInstance);
return this;
}
/// <inheritdoc/>
public new IObjectFilterOperationDescriptor<TObject> Directive<T>()
where T : class, new()
{
base.Directive<T>();
return this;
}
/// <inheritdoc/>
public new IObjectFilterOperationDescriptor<TObject> Directive(
NameString name,
params ArgumentNode[] arguments)
{
base.Directive(name, arguments);
return this;
}
/// <summary>
/// Create a new string filter operation descriptor.
/// </summary>
/// <param name="context">
/// The descriptor context.
/// </param>
/// <param name="descriptor">
/// The field descriptor on which this
/// filter operation shall be applied.
/// </param>
/// <param name="name">
/// The default name of the filter operation field.
/// </param>
/// <param name="type">
/// The field type of this filter operation field.
/// </param>
/// <param name="operation">
/// The filter operation info.
/// </param>
/// <param name="filterConventions">
/// The filter conventions
/// </param>
public static ObjectFilterOperationDescriptor<TObject> New(
IDescriptorContext context,
ObjectFilterFieldDescriptor<TObject> descriptor,
NameString name,
ITypeReference type,
FilterOperation operation,
IFilterConvention filterConventions) =>
new ObjectFilterOperationDescriptor<TObject>(
context, descriptor, name, type, operation, filterConventions);
}
}
| 32.776699 | 92 | 0.591528 | [
"MIT"
] | DaanAcohen/hotchocolate | src/HotChocolate/Filters/src/Types.Filters/Object/ObjectFilterOperationDescriptor~1.cs | 3,376 | C# |
// Copyright 2017 DAIMTO ([Linda Lawton](https://twitter.com/LindaLawtonDK)) : [www.daimto.com](http://www.daimto.com/)
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by DAIMTO-Google-apis-Sample-generator 1.0.0
// Template File Name: methodTemplate.tt
// Build date: 2017-10-08
// C# generater version: 1.0.0
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// About
//
// Unoffical sample for the Compute beta API for C#.
// This sample is designed to be used with the Google .Net client library. (https://github.com/google/google-api-dotnet-client)
//
// API Description: Creates and runs virtual machines on Google Cloud Platform.
// API Documentation Link https://developers.google.com/compute/docs/reference/latest/
//
// Discovery Doc https://www.googleapis.com/discovery/v1/apis/Compute/beta/rest
//
//------------------------------------------------------------------------------
// Installation
//
// This sample code uses the Google .Net client library (https://github.com/google/google-api-dotnet-client)
//
// NuGet package:
//
// Location: https://www.nuget.org/packages/Google.Apis.Compute.beta/
// Install Command: PM> Install-Package Google.Apis.Compute.beta
//
//------------------------------------------------------------------------------
using Google.Apis.Compute.beta;
using Google.Apis.Compute.beta.Data;
using System;
namespace GoogleSamplecSharpSample.Computebeta.Methods
{
public static class HealthChecksSample
{
        public class HealthChecksDeleteOptionalParms
        {
            /// <summary>
            /// An optional request ID used to make the request idempotent: if a request with the
            /// same ID was already completed, the server ignores the retried request. Must be a
            /// valid UUID (the zero UUID 00000000-0000-0000-0000-000000000000 is not supported).
            /// </summary>
            public string RequestId { get; set; }
        }
/// <summary>
/// Deletes the specified HealthCheck resource.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/delete
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="healthCheck">Name of the HealthCheck resource to delete.</param>
/// <param name="optional">Optional paramaters.</param>
/// <returns>OperationResponse</returns>
public static Operation Delete(ComputeService service, string project, string healthCheck, HealthChecksDeleteOptionalParms optional = null)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (project == null)
throw new ArgumentNullException(project);
if (healthCheck == null)
throw new ArgumentNullException(healthCheck);
// Building the initial request.
var request = service.HealthChecks.Delete(project, healthCheck);
// Applying optional parameters to the request.
request = (HealthChecksResource.DeleteRequest)SampleHelpers.ApplyOptionalParms(request, optional);
// Requesting data.
return request.Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.Delete failed.", ex);
}
}
/// <summary>
/// Returns the specified HealthCheck resource. Get a list of available health checks by making a list() request.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/get
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="healthCheck">Name of the HealthCheck resource to return.</param>
/// <returns>HealthCheckResponse</returns>
public static HealthCheck Get(ComputeService service, string project, string healthCheck)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (project == null)
throw new ArgumentNullException(project);
if (healthCheck == null)
throw new ArgumentNullException(healthCheck);
// Make the request.
return service.HealthChecks.Get(project, healthCheck).Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.Get failed.", ex);
}
}
        public class HealthChecksInsertOptionalParms
        {
            /// <summary>
            /// An optional request ID used to make the request idempotent: if a request with the
            /// same ID was already completed, the server ignores the retried request. Must be a
            /// valid UUID (the zero UUID 00000000-0000-0000-0000-000000000000 is not supported).
            /// </summary>
            public string RequestId { get; set; }
        }
/// <summary>
/// Creates a HealthCheck resource in the specified project using the data included in the request.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/insert
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="body">A valid Compute beta body.</param>
/// <param name="optional">Optional paramaters.</param>
/// <returns>OperationResponse</returns>
public static Operation Insert(ComputeService service, string project, HealthCheck body, HealthChecksInsertOptionalParms optional = null)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
if (project == null)
throw new ArgumentNullException(project);
// Building the initial request.
var request = service.HealthChecks.Insert(body, project);
// Applying optional parameters to the request.
request = (HealthChecksResource.InsertRequest)SampleHelpers.ApplyOptionalParms(request, optional);
// Requesting data.
return request.Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.Insert failed.", ex);
}
}
        public class HealthChecksListOptionalParms
        {
            /// <summary>
            /// Filter {expression} for the listed resources, in the format
            /// "field_name comparison_string literal_string" (comparison_string is eq or ne;
            /// string literals are interpreted as RE2 regular expressions that must match the
            /// entire field). Nested fields are supported, and multiple parenthesized
            /// expressions are combined with AND.
            /// </summary>
            public string Filter { get; set; }

            /// <summary>
            /// Maximum number of results per page, 0 to 500 inclusive (default: 500). When more
            /// results are available, the response carries a nextPageToken for subsequent requests.
            /// </summary>
            public int? MaxResults { get; set; }

            /// <summary>
            /// Sort order of the results. By default results are sorted by name in alphanumerical
            /// order; use orderBy="creationTimestamp desc" for newest-first. Only sorting by name
            /// or creationTimestamp desc is supported.
            /// </summary>
            public string OrderBy { get; set; }

            /// <summary>
            /// Page token; set it to the nextPageToken returned by a previous list request to get
            /// the next page of results.
            /// </summary>
            public string PageToken { get; set; }
        }
/// <summary>
/// Retrieves the list of HealthCheck resources available to the specified project.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/list
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="optional">Optional paramaters.</param>
/// <returns>HealthCheckListResponse</returns>
public static HealthCheckList List(ComputeService service, string project, HealthChecksListOptionalParms optional = null)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (project == null)
throw new ArgumentNullException(project);
// Building the initial request.
var request = service.HealthChecks.List(project);
// Applying optional parameters to the request.
request = (HealthChecksResource.ListRequest)SampleHelpers.ApplyOptionalParms(request, optional);
// Requesting data.
return request.Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.List failed.", ex);
}
}
        /// <summary>
        /// Optional parameters for the HealthChecks.Patch request.
        /// </summary>
        public class HealthChecksPatchOptionalParms
        {
            /// <summary>
            /// An optional request ID to identify requests. Specify a unique request ID so that if you must
            /// retry your request, the server will know to ignore the request if it has already been
            /// completed. This prevents clients from accidentally creating duplicate commitments. The
            /// request ID must be a valid UUID, with the exception that the zero UUID is not supported
            /// (00000000-0000-0000-0000-000000000000).
            /// </summary>
            public string RequestId { get; set; }
        }
/// <summary>
/// Updates a HealthCheck resource in the specified project using the data included in the request. This method supports PATCH semantics and uses the JSON merge patch format and processing rules.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/patch
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="healthCheck">Name of the HealthCheck resource to patch.</param>
/// <param name="body">A valid Compute beta body.</param>
/// <param name="optional">Optional paramaters.</param>
/// <returns>OperationResponse</returns>
public static Operation Patch(ComputeService service, string project, string healthCheck, HealthCheck body, HealthChecksPatchOptionalParms optional = null)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
if (project == null)
throw new ArgumentNullException(project);
if (healthCheck == null)
throw new ArgumentNullException(healthCheck);
// Building the initial request.
var request = service.HealthChecks.Patch(body, project, healthCheck);
// Applying optional parameters to the request.
request = (HealthChecksResource.PatchRequest)SampleHelpers.ApplyOptionalParms(request, optional);
// Requesting data.
return request.Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.Patch failed.", ex);
}
}
/// <summary>
/// Returns permissions that a caller has on the specified resource.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/testIamPermissions
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="resource">Name of the resource for this request.</param>
/// <param name="body">A valid Compute beta body.</param>
/// <returns>TestPermissionsResponseResponse</returns>
public static TestPermissionsResponse TestIamPermissions(ComputeService service, string project, string resource, TestPermissionsRequest body)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
if (project == null)
throw new ArgumentNullException(project);
if (resource == null)
throw new ArgumentNullException(resource);
// Make the request.
return service.HealthChecks.TestIamPermissions(body, project, resource).Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.TestIamPermissions failed.", ex);
}
}
        /// <summary>
        /// Optional parameters for the HealthChecks.Update request.
        /// </summary>
        public class HealthChecksUpdateOptionalParms
        {
            /// <summary>
            /// An optional request ID to identify requests. Specify a unique request ID so that if you must
            /// retry your request, the server will know to ignore the request if it has already been
            /// completed. This prevents clients from accidentally creating duplicate commitments. The
            /// request ID must be a valid UUID, with the exception that the zero UUID is not supported
            /// (00000000-0000-0000-0000-000000000000).
            /// </summary>
            public string RequestId { get; set; }
        }
/// <summary>
/// Updates a HealthCheck resource in the specified project using the data included in the request.
/// Documentation https://developers.google.com/compute/beta/reference/healthChecks/update
/// Generation Note: This does not always build corectly. Google needs to standardise things I need to figuer out which ones are wrong.
/// </summary>
/// <param name="service">Authenticated Compute service.</param>
/// <param name="project">Project ID for this request.</param>
/// <param name="healthCheck">Name of the HealthCheck resource to update.</param>
/// <param name="body">A valid Compute beta body.</param>
/// <param name="optional">Optional paramaters.</param>
/// <returns>OperationResponse</returns>
public static Operation Update(ComputeService service, string project, string healthCheck, HealthCheck body, HealthChecksUpdateOptionalParms optional = null)
{
try
{
// Initial validation.
if (service == null)
throw new ArgumentNullException("service");
if (body == null)
throw new ArgumentNullException("body");
if (project == null)
throw new ArgumentNullException(project);
if (healthCheck == null)
throw new ArgumentNullException(healthCheck);
// Building the initial request.
var request = service.HealthChecks.Update(body, project, healthCheck);
// Applying optional parameters to the request.
request = (HealthChecksResource.UpdateRequest)SampleHelpers.ApplyOptionalParms(request, optional);
// Requesting data.
return request.Execute();
}
catch (Exception ex)
{
throw new Exception("Request HealthChecks.Update failed.", ex);
}
}
}
public static class SampleHelpers
{
/// <summary>
/// Using reflection to apply optional parameters to the request.
///
/// If the optonal parameters are null then we will just return the request as is.
/// </summary>
/// <param name="request">The request. </param>
/// <param name="optional">The optional parameters. </param>
/// <returns></returns>
public static object ApplyOptionalParms(object request, object optional)
{
if (optional == null)
return request;
System.Reflection.PropertyInfo[] optionalProperties = (optional.GetType()).GetProperties();
foreach (System.Reflection.PropertyInfo property in optionalProperties)
{
// Copy value from optional parms to the request. They should have the same names and datatypes.
System.Reflection.PropertyInfo piShared = (request.GetType()).GetProperty(property.Name);
if (property.GetValue(optional, null) != null) // TODO Test that we do not add values for items that are null
piShared.SetValue(request, property.GetValue(optional, null), null);
}
return request;
}
}
} | 58.954301 | 1,311 | 0.636952 | [
"Apache-2.0"
] | AhmerRaza/Google-Dotnet-Samples | Samples/Compute Engine API/beta/HealthChecksSample.cs | 21,933 | C# |
using System;
using System.Net.Http;
using ResultFunctional.Models.Enums;
using ResultFunctional.Models.Implementations.Errors;
using ResultFunctional.Models.Implementations.Errors.AuthorizeErrors;
using ResultFunctional.Models.Implementations.Errors.Base;
using ResultFunctional.Models.Implementations.Errors.CommonErrors;
using ResultFunctional.Models.Implementations.Errors.ConversionErrors;
using ResultFunctional.Models.Implementations.Errors.DatabaseErrors;
using ResultFunctional.Models.Implementations.Errors.RestErrors;
using ResultFunctional.Models.Interfaces.Errors.CommonErrors;
using ResultFunctional.Models.Interfaces.Errors.conversionErrors;
using ResultFunctional.Models.Interfaces.Errors.ConversionErrors;
using ResultFunctional.Models.Interfaces.Errors.DatabaseErrors;
using Xunit;
namespace ResultFunctionalXUnit.Models.Errors
{
    /// <summary>
    /// Tests for the error-type factory.
    /// </summary>
    public class ErrorResultFactoryTest
    {
        /// <summary>
        /// Simple error.
        /// </summary>
        [Fact]
        public void SimpleError()
        {
            var error = ErrorResultFactory.SimpleErrorType("Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<SimpleErrorResult>(error);
            Assert.IsType<SimpleErrorResult>(withException);
        }
        /// <summary>
        /// Error with a generic (undefined) type.
        /// </summary>
        [Fact]
        public void ErrorType()
        {
            var error = ErrorResultFactory.ErrorType(CommonErrorType.Unknown, "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<ErrorTypeResult<CommonErrorType>>(error);
            Assert.IsType<ErrorTypeResult<CommonErrorType>>(withException);
        }
        /// <summary>
        /// Common error.
        /// </summary>
        [Fact]
        public void CommonError()
        {
            var error = ErrorResultFactory.CommonError(CommonErrorType.Unknown, "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<CommonErrorResult>(error);
            Assert.IsType<CommonErrorResult>(withException);
        }
        /// <summary>
        /// Value-not-found error.
        /// </summary>
        [Fact]
        public void ValueNotFoundError()
        {
            var error = ErrorResultFactory.ValueNotFoundError(String.Empty, typeof(string));
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IValueNotFoundErrorResult>(error);
            Assert.IsAssignableFrom<IValueNotFoundErrorResult>(withException);
        }
        /// <summary>
        /// Invalid-value error.
        /// </summary>
        [Fact]
        public void ValueNotValidError()
        {
            var error = ErrorResultFactory.ValueNotValidError(String.Empty, typeof(string), "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IValueNotValidErrorResult>(error);
            Assert.IsAssignableFrom<IValueNotValidErrorResult>(withException);
        }
        /// <summary>
        /// Duplicate-value error.
        /// </summary>
        [Fact]
        public void ValueDuplicatedError()
        {
            var error = ErrorResultFactory.ValueDuplicateError(String.Empty, typeof(string), "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IValueDuplicateErrorResult>(error);
            Assert.IsAssignableFrom<IValueDuplicateErrorResult>(withException);
        }
        /// <summary>
        /// Authorization error.
        /// </summary>
        [Fact]
        public void AuthorizeError()
        {
            var error = ErrorResultFactory.AuthorizeError(AuthorizeErrorType.Password, "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<AuthorizeErrorResult>(error);
            Assert.IsType<AuthorizeErrorResult>(withException);
        }
        /// <summary>
        /// Database access error.
        /// </summary>
        [Fact]
        public void DatabaseAccessError()
        {
            var error = ErrorResultFactory.DatabaseAccessError("Table", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<DatabaseAccessErrorResult>(error);
            Assert.IsType<DatabaseAccessErrorResult>(withException);
        }
        /// <summary>
        /// Database connection error.
        /// </summary>
        [Fact]
        public void DatabaseConnectionError()
        {
            var error = ErrorResultFactory.DatabaseConnectionError("Table", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<DatabaseConnectionErrorResult>(error);
            Assert.IsType<DatabaseConnectionErrorResult>(withException);
        }
        /// <summary>
        /// Database invalid-value error.
        /// </summary>
        [Fact]
        public void DatabaseValueNotValidError()
        {
            var error = ErrorResultFactory.DatabaseValueNotValidError(String.Empty, "Table", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IDatabaseValueNotValidErrorResult>(error);
            Assert.IsAssignableFrom<IDatabaseValueNotValidErrorResult>(withException);
        }
        /// <summary>
        /// Database value-not-found error.
        /// </summary>
        [Fact]
        public void DatabaseValueNotFoundError()
        {
            var error = ErrorResultFactory.DatabaseValueNotFoundError(String.Empty, "Table", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IDatabaseValueNotFoundErrorResult>(error);
            Assert.IsAssignableFrom<IDatabaseValueNotFoundErrorResult>(withException);
        }
        /// <summary>
        /// Database duplicate-value error.
        /// </summary>
        [Fact]
        public void DatabaseValueDuplicateError()
        {
            var error = ErrorResultFactory.DatabaseValueDuplicateError(String.Empty, "Table", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IDatabaseValueDuplicatedErrorResult>(error);
            Assert.IsAssignableFrom<IDatabaseValueDuplicatedErrorResult>(withException);
        }
        /// <summary>
        /// REST service host error.
        /// </summary>
        [Fact]
        public void RestHostError()
        {
            var error = ErrorResultFactory.RestHostError(RestErrorType.BadRequest, "host", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<RestHostErrorResult>(error);
            Assert.IsType<RestHostErrorResult>(withException);
        }
        /// <summary>
        /// REST service timeout error.
        /// </summary>
        [Fact]
        public void RestTimeoutError()
        {
            var error = ErrorResultFactory.RestTimeoutError("host", TimeSpan.FromSeconds(5), "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<RestTimeoutErrorResult>(error);
            Assert.IsType<RestTimeoutErrorResult>(withException);
        }
        /// <summary>
        /// REST service message error.
        /// </summary>
        [Fact]
        public void RestMessageError()
        {
            var error = ErrorResultFactory.RestError(RestErrorType.BadRequest, new HttpResponseMessage(), "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsType<RestMessageErrorResult>(error);
            Assert.IsType<RestMessageErrorResult>(withException);
        }
        /// <summary>
        /// Serialization error.
        /// </summary>
        [Fact]
        public void SerializeError()
        {
            var error = ErrorResultFactory.SerializeError(String.Empty, "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<ISerializeErrorResult>(error);
            Assert.IsAssignableFrom<ISerializeErrorResult>(withException);
        }
        /// <summary>
        /// Deserialization error.
        /// </summary>
        [Fact]
        public void DeserializeError()
        {
            var error = ErrorResultFactory.DeserializeError<string>(String.Empty, "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<IDeserializeErrorResult>(error);
            Assert.IsAssignableFrom<IDeserializeErrorResult>(withException);
        }
        /// <summary>
        /// JSON scheme error.
        /// </summary>
        [Fact]
        public void JsonSchemeError()
        {
            var error = ErrorResultFactory.JsonSchemeError("scheme", "Ошибка");
            var withException = error.AppendException(new Exception());

            Assert.IsAssignableFrom<JsonSchemeErrorResult>(error);
            Assert.IsAssignableFrom<JsonSchemeErrorResult>(withException);
        }
    }
}
"MIT"
] | rubilnik4/ResultFunctional | ResultFunctionalXUnit/Models/Errors/ErrorResultFactoryTest.cs | 9,246 | C# |
#region License
// Copyright (c) 2009, ClearCanvas Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of ClearCanvas Inc. nor the names of its contributors
// may be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
// OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
// GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
// OF SUCH DAMAGE.
#endregion
using System;
using System.ServiceModel;
using ClearCanvas.Enterprise.Common;
namespace ClearCanvas.Ris.Application.Common.Admin.NoteCategoryAdmin
{
	/// <summary>
	/// Provides operations to administer note categories.
	/// </summary>
	[RisApplicationService]
	[ServiceContract]
	public interface INoteCategoryAdminService
	{
		/// <summary>
		/// Returns a summary list of all note categories.
		/// </summary>
		/// <param name="request"><see cref="ListAllNoteCategoriesRequest"/></param>
		/// <returns><see cref="ListAllNoteCategoriesResponse"/></returns>
		[OperationContract]
		ListAllNoteCategoriesResponse ListAllNoteCategories(ListAllNoteCategoriesRequest request);
		/// <summary>
		/// Adds a new note category. A note category with the same name as an existing note category cannot be added.
		/// </summary>
		/// <param name="request"><see cref="AddNoteCategoryRequest"/></param>
		/// <returns><see cref="AddNoteCategoryResponse"/></returns>
		[OperationContract]
		[FaultContract(typeof(RequestValidationException))]
		AddNoteCategoryResponse AddNoteCategory(AddNoteCategoryRequest request);
		/// <summary>
		/// Updates a note category. A note category cannot be renamed to the name of an existing note category.
		/// </summary>
		/// <param name="request"><see cref="UpdateNoteCategoryRequest"/></param>
		/// <returns><see cref="UpdateNoteCategoryResponse"/></returns>
		[OperationContract]
		[FaultContract(typeof(ConcurrentModificationException))]
		[FaultContract(typeof(RequestValidationException))]
		UpdateNoteCategoryResponse UpdateNoteCategory(UpdateNoteCategoryRequest request);
		/// <summary>
		/// Deletes a note category.
		/// </summary>
		/// <param name="request"><see cref="DeleteNoteCategoryRequest "/></param>
		/// <returns><see cref="DeleteNoteCategoryResponse"/></returns>
		[OperationContract]
		[FaultContract(typeof(ConcurrentModificationException))]
		[FaultContract(typeof(RequestValidationException))]
		DeleteNoteCategoryResponse DeleteNoteCategory(DeleteNoteCategoryRequest request);
		/// <summary>
		/// Loads all form data needed to edit a note category.
		/// </summary>
		/// <param name="request"><see cref="GetNoteCategoryEditFormDataRequest"/></param>
		/// <returns><see cref="GetNoteCategoryEditFormDataResponse"/></returns>
		[OperationContract]
		GetNoteCategoryEditFormDataResponse GetNoteCategoryEditFormData(GetNoteCategoryEditFormDataRequest request);
		/// <summary>
		/// Loads details for a note category.
		/// </summary>
		/// <param name="request"><see cref="LoadNoteCategoryForEditRequest"/></param>
		/// <returns><see cref="LoadNoteCategoryForEditResponse"/></returns>
		[OperationContract]
		LoadNoteCategoryForEditResponse LoadNoteCategoryForEdit(LoadNoteCategoryForEditRequest request);
	}
}
| 48.020202 | 125 | 0.700463 | [
"Apache-2.0"
] | econmed/ImageServer20 | Ris/Application/Common/Admin/NoteCategoryAdmin/INoteCategoryAdminService.cs | 4,756 | C# |
// Copyright (c) MASA Stack All rights reserved.
// Licensed under the MIT License. See LICENSE.txt in the project root for license information.
namespace Masa.Contrib.Storage.ObjectStorage.Aliyun;
/// <summary>
/// For internal use, structure may change at any time
/// </summary>
public interface ICredentialProvider
{
    /// Whether this provider supports STS (temporary security token) credentials.
    bool SupportSts { get; }
    /// Gets a temporary security token for accessing object storage.
    TemporaryCredentialsResponse GetSecurityToken();
}
| 27.2 | 95 | 0.754902 | [
"MIT"
] | Sky-nt/MASA.Contrib | src/Storage/Masa.Contrib.Storage.ObjectStorage.Aliyun/ICredentialProvider.cs | 408 | C# |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using Microsoft.AspNetCore.Razor.Language;
using Microsoft.CodeAnalysis.Razor;
using Microsoft.CodeAnalysis.Razor.ProjectSystem;
namespace Microsoft.AspNetCore.Razor.Test.Common
{
internal class TestProjectSnapshotProjectEngineFactory : ProjectSnapshotProjectEngineFactory
{
public Action<RazorProjectEngineBuilder> Configure { get; set; }
public RazorProjectEngine Engine { get; set; }
public override RazorProjectEngine Create(RazorConfiguration configuration, RazorProjectFileSystem fileSystem, Action<RazorProjectEngineBuilder> configure)
{
return Engine ?? RazorProjectEngine.Create(configuration, fileSystem, configure ?? Configure);
}
public override IProjectEngineFactory FindFactory(ProjectSnapshot project)
{
throw new NotImplementedException();
}
public override IProjectEngineFactory FindSerializableFactory(ProjectSnapshot project)
{
throw new NotImplementedException();
}
}
}
| 36.787879 | 163 | 0.73888 | [
"Apache-2.0"
] | Chatina73/AspNetCore-Tooling | src/Razor/test/Microsoft.AspNetCore.Razor.LanguageServer.Test.Common/TestProjectSnapshotProjectEngineFactory.cs | 1,216 | C# |
using System;
using System.CodeDom.Compiler;
using System.ComponentModel;
using System.Diagnostics;
using System.ServiceModel;
namespace Workday.Payroll
{
	/// <summary>
	/// Generated WCF message contract wrapping the Get_Period_Schedules response body
	/// (IsWrapped = false, so the body member is serialized directly).
	/// NOTE: tool-generated code — edits here may be lost on regeneration.
	/// </summary>
	[GeneratedCode("System.ServiceModel", "4.0.0.0"), EditorBrowsable(EditorBrowsableState.Advanced), DebuggerStepThrough, MessageContract(IsWrapped = false)]
	public class Get_Period_SchedulesOutput
	{
		// Response payload, serialized in the Workday bsvc namespace.
		[MessageBodyMember(Namespace = "urn:com.workday/bsvc", Order = 0)]
		public Get_Period_Schedules_ResponseType Get_Period_Schedules_Response;
		/// <summary>
		/// Parameterless constructor required by the message-contract serializer.
		/// </summary>
		public Get_Period_SchedulesOutput()
		{
		}
		/// <summary>
		/// Creates the message wrapper around an existing response payload.
		/// </summary>
		public Get_Period_SchedulesOutput(Get_Period_Schedules_ResponseType Get_Period_Schedules_Response)
		{
			this.Get_Period_Schedules_Response = Get_Period_Schedules_Response;
		}
	}
}
| 29.28 | 155 | 0.81694 | [
"MIT"
] | matteofabbri/Workday.WebServices | Workday.Payroll/Get_Period_SchedulesOutput.cs | 732 | C# |
using API;
using PxStat.Data;
using System.Collections.Generic;
using System.Data;
namespace PxStat.System.Navigation
{
/// <summary>
/// Creates the mandatory Release Keywords. These are based on fields in the Matrix and associated entities
/// </summary>
internal class Keyword_Release_BSO_CreateMandatory
{
/// <summary>
/// Create a keyword release
/// </summary>
/// <param name="Ado"></param>
/// <param name="releaseId"></param>
/// <param name="userName"></param>
internal void Create(ADO Ado, int releaseId, string userName)
{
Release_ADO rAdo = new Data.Release_ADO(Ado);
var latestRelease = Release_ADO.GetReleaseDTO(rAdo.ReadID(releaseId, userName));
if (latestRelease == null) return;
rAdo.DeleteKeywords(latestRelease.RlsCode, userName);
//Create the table that will be bulk inserted
DataTable dt = new DataTable();
dt.Columns.Add("KRL_VALUE", typeof(string));
dt.Columns.Add("KRL_RLS_ID", typeof(int));
dt.Columns.Add("KRL_MANDATORY_FLAG", typeof(bool));
dt.Columns.Add("KRL_SINGULARISED_FLAG", typeof(bool));
Keyword_Release_ADO adoKeywordCreate = new Keyword_Release_ADO();
//Get the matrices for the RlsCode - There may be more than one e.g. if multiple languages are used in the same release
Matrix_ADO mAdo = new Matrix_ADO(Ado);
IList<dynamic> matrixList;
matrixList = mAdo.ReadAllForRelease(latestRelease.RlsCode, userName);
//Create the table rows
foreach (dynamic item in matrixList)
{
//Get a Keyword Extractor - the particular version returned will depend on the language
Keyword_BSO_Extract kbe = new Keyword_BSO_Extract(item.LngIsoCode);
// IKeywordExtractor ext = kbe.GetExtractor();
//Add the keywords and other data to the output table
AddToTable(ref dt, kbe.ExtractSplitSingular(item.MtrTitle), item.RlsID);
//Add the MtrCode to the keyword list without any transformations
AddToTable(ref dt, new List<string>() { item.MtrCode }, item.RlsID, false);
//Add the Copyright Code
AddToTable(ref dt, kbe.ExtractSplit(item.CprCode), item.RlsID, false);
//Add the Copyright Value
AddToTable(ref dt, kbe.ExtractSplit(item.CprValue), item.RlsID, false);
//Add the Language
AddToTable(ref dt, kbe.ExtractSplit(item.LngIsoName), item.RlsID, false);
//Add the Frequency Code
//AddToTable(ref dt, ext.ExtractSplit(item.FrqCode), item.RlsID);
//Add the Frequency Value
AddToTable(ref dt, kbe.ExtractSplitSingular(item.FrqValue), item.RlsID);
//Get the keywords from Statistic
IList<dynamic> statList = mAdo.ReadStatisticByRelease(latestRelease.RlsCode, item.LngIsoCode);
foreach (dynamic statItem in statList)
{
AddToTable(ref dt, kbe.ExtractSplit(statItem.SttCode), item.RlsID, false);
AddToTable(ref dt, kbe.ExtractSplitSingular(statItem.SttValue), item.RlsID);
AddToTable(ref dt, kbe.ExtractSplitSingular(statItem.SttUnit), item.RlsID);
}
//Get the keywords from Classification
IList<dynamic> classificationList = mAdo.ReadClassificationByRelease(latestRelease.RlsCode, item.LngIsoCode);
foreach (dynamic classItem in classificationList)
{
AddToTable(ref dt, kbe.ExtractSplit(classItem.ClsCode), item.RlsID, false);
AddToTable(ref dt, kbe.ExtractSplitSingular(classItem.ClsValue), item.RlsID);
//Get the keywords from Variables
IList<dynamic> variableList = mAdo.ReadVariableByRelease(latestRelease.RlsCode, item.LngIsoCode, classItem.ClsCode);
foreach (dynamic variableItem in variableList)
{
AddToTable(ref dt, kbe.ExtractSplit(variableItem.VrbCode), item.RlsID, false);
AddToTable(ref dt, kbe.ExtractSplitSingular(variableItem.VrbValue), item.RlsID);
}
}
//Get the keywords from Period
IList<dynamic> periodList = mAdo.ReadPeriodByRelease(latestRelease.RlsCode, item.LngIsoCode, item.FrqCode);
foreach (dynamic periodItem in periodList)
{
AddToTable(ref dt, kbe.ExtractSplit(periodItem.PrdCode), item.RlsID, false);
AddToTable(ref dt, kbe.ExtractSplitSingular(periodItem.PrdValue), item.RlsID);
}
}
//Final check to make sure there are no duplicates
adoKeywordCreate.Create(Ado, dt.DefaultView.ToTable(true, new string[4] { "KRL_VALUE", "KRL_RLS_ID", "KRL_MANDATORY_FLAG", "KRL_SINGULARISED_FLAG" }));
}
/// <summary>
/// Adds a list of words to the table
/// </summary>
/// <param name="dt"></param>
/// <param name="words"></param>
/// <param name="rlsID"></param>
private void AddToTable(ref DataTable dt, List<string> words, int rlsID, bool singularised = true)
{
foreach (string word in words)
{
//First check if the keyword is already in the table. It doesn't need to go in twice!
string searchTerm = "KRL_VALUE = '" + word + "'";
DataRow[] dr = dt.Select(searchTerm);
if (dr.Length == 0)
{
var row = dt.NewRow();
row["KRL_VALUE"] = word;
row["KRL_RLS_ID"] = rlsID;
row["KRL_MANDATORY_FLAG"] = true;
row["KRL_SINGULARISED_FLAG"] = singularised;
dt.Rows.Add(row);
}
}
}
}
}
| 44.292857 | 163 | 0.583454 | [
"MIT"
] | CSOIreland/PxStat | server/PxStat/Entities/System/Navigation/Keyword_Release/BSO/Keyword_Release_BSO_CreateMandatory.cs | 6,203 | C# |
using System;
using System.Diagnostics;
using System.IO;
using System.Security.Cryptography;
using SeguraChain_Lib.Blockchain.Setting;
using SeguraChain_Lib.Utility;
namespace SeguraChain_Lib.Algorithm
{
    public class ClassAes
    {
        /// <summary>
        /// AES Settings.
        /// </summary>
        public const int IterationCount = 10240; // PBKDF2 iterations for IV derivation.
        public const int IterationCountKeyGenerator = 1000; // PBKDF2 iterations for key derivation.
        public const int EncryptionKeySize = 256; // AES key size, in bits.
        public const int EncryptionBlockSize = 128; // AES block size, in bits.
        public const int EncryptionKeyByteArraySize = 32; // Key length in bytes (256 bits).
        public const int IvSize = 16; // IV length in bytes (one 128-bit block).
        /// <summary>
        /// Generate a IV from a key.
        /// Derives IvSize bytes via PBKDF2 (Rfc2898DeriveBytes), using the key as the password
        /// and the blockchain mark key as the salt.
        /// </summary>
        /// <param name="key">Input key material (used as the PBKDF2 password).</param>
        /// <param name="iteration">PBKDF2 iteration count (defaults to IterationCount).</param>
        /// <returns>IvSize derived bytes.</returns>
        public static byte[] GenerateIv(byte[] key, int iteration = IterationCount)
        {
            using (Rfc2898DeriveBytes passwordDeriveBytes = new Rfc2898DeriveBytes(key, BlockchainSetting.BlockchainMarkKey, iteration))
            {
                return passwordDeriveBytes.GetBytes(IvSize);
            }
        }
        /// <summary>
        /// Generate a 32-byte key from a given data.
        /// When useSha is requested (or forced because the input is shorter than 32 bytes), the
        /// input is first stretched with PBKDF2 and then hashed with SHA-512 before the first
        /// 32 bytes are taken; otherwise the first 32 bytes of the input are used directly.
        /// </summary>
        /// <param name="data">Input key material.</param>
        /// <param name="useSha">Whether to derive/hash the input before truncation.</param>
        /// <param name="key">Receives the 32-byte key.</param>
        /// <returns>Always true (kept for interface compatibility with callers).</returns>
        public static bool GenerateKey(byte[] data, bool useSha, out byte[] key)
        {
            key = new byte[EncryptionKeyByteArraySize];
            // Inputs shorter than the key size must be stretched first.
            if (data.Length < EncryptionKeyByteArraySize)
            {
                useSha = true;
            }
            if (useSha)
            {
                data = GenerateIv(data, IterationCountKeyGenerator);
                data = ClassUtility.GenerateSha512ByteArrayFromByteArray(data);
            }
            Array.Copy(data, 0, key, 0, EncryptionKeyByteArraySize);
            return true;
        }
        /// <summary>
        /// AES Encryption process.
        /// AES-256 in CFB mode with PKCS7 padding (Rijndael with a 128-bit block, i.e. AES).
        /// </summary>
        /// <param name="content">Plaintext bytes.</param>
        /// <param name="key">32-byte AES key.</param>
        /// <param name="iv">16-byte IV.</param>
        /// <param name="result">Receives the ciphertext, or null on failure.</param>
        /// <returns>True on success; false (with a null result) on any failure.</returns>
        public static bool EncryptionProcess(byte[] content, byte[] key, byte[] iv, out byte[] result)
        {
            try
            {
                using (RijndaelManaged aesObject = new RijndaelManaged())
                {
                    aesObject.KeySize = EncryptionKeySize;
                    aesObject.BlockSize = EncryptionBlockSize;
                    aesObject.Key = key;
                    aesObject.IV = iv;
                    aesObject.Mode = CipherMode.CFB;
                    aesObject.Padding = PaddingMode.PKCS7;
                    using (ICryptoTransform encryptCryptoTransform = aesObject.CreateEncryptor(key, iv))
                    {
                        using (MemoryStream memoryStreamEncrypt = new MemoryStream())
                        {
                            using (CryptoStream cryptoStreamEncrypt = new CryptoStream(memoryStreamEncrypt, encryptCryptoTransform, CryptoStreamMode.Write))
                            {
                                cryptoStreamEncrypt.Write(content, 0, content.Length);
                                cryptoStreamEncrypt.FlushFinalBlock();
                                result = memoryStreamEncrypt.ToArray();
                                return true;
                            }
                        }
                    }
                }
            }
#if DEBUG
            catch (Exception error)
            {
                Debug.WriteLine("Error on encrypt content. Exception: " + error.Message);
#else
            catch
            {
#endif
                // Deliberate best-effort: any failure is reported via the boolean return value.
                result = null;
                return false;
            }
        }
        /// <summary>
        /// AES Decryption process.
        /// Mirror of EncryptionProcess: AES-256 CFB with PKCS7 padding; the same key/IV pair
        /// used for encryption must be supplied.
        /// </summary>
        /// <param name="content">Ciphertext bytes.</param>
        /// <param name="key">32-byte AES key.</param>
        /// <param name="iv">16-byte IV.</param>
        /// <param name="result">Receives the plaintext, or null on failure.</param>
        /// <returns>True on success; false (with a null result) on any failure (e.g. wrong key/IV).</returns>
        public static bool DecryptionProcess(byte[] content, byte[] key, byte[] iv, out byte[] result)
        {
            try
            {
                using (RijndaelManaged aesObject = new RijndaelManaged())
                {
                    aesObject.KeySize = EncryptionKeySize;
                    aesObject.BlockSize = EncryptionBlockSize;
                    aesObject.Key = key;
                    aesObject.IV = iv;
                    aesObject.Mode = CipherMode.CFB;
                    aesObject.Padding = PaddingMode.PKCS7;
                    using (ICryptoTransform decryptCryptoTransform = aesObject.CreateDecryptor(key, iv))
                    {
                        using (MemoryStream memoryStreamDecrypt = new MemoryStream())
                        {
                            using (CryptoStream cryptoStreamDecrypt = new CryptoStream(memoryStreamDecrypt, decryptCryptoTransform, CryptoStreamMode.Write))
                            {
                                cryptoStreamDecrypt.Write(content, 0, content.Length);
                                cryptoStreamDecrypt.FlushFinalBlock();
                                result = memoryStreamDecrypt.ToArray();
                                return true;
                            }
                        }
                    }
                }
            }
#if DEBUG
            catch (Exception error)
            {
                Debug.WriteLine("Error on decrypt content. Exception: " + error.Message);
#else
            catch
            {
#endif
                // Deliberate best-effort: any failure is reported via the boolean return value.
                result = null;
                return false;
            }
        }
    }
}
| 35.414634 | 156 | 0.495351 | [
"MIT"
] | Xirobod/segurachain | SeguraChain/SeguraChain-Lib/Algorithm/ClassAes.cs | 5,810 | C# |
namespace libMC.NET.Entities {
public class Block {
        // Numeric block id; Name is derived from it via the Blockitemid enum.
        public int Id;
        // Human-readable block name (enum member name for Id).
        public string Name;
        // World coordinates of the block.
        public int X;
        public int Y;
        public int Z;
        // Coordinates of the chunk containing this block.
        public int Cx;
        public int Cz;
public Block(int id, int x, int y, int z,int cx, int cz)
{
Id = id;
X = x;
Y = y;
Z = z;
Cx = cx;
Cz = cz;
Name = ((Blockitemid)Id).ToString();
}
public enum Blockitemid {
Air,
Stone,
Grass,
Dirt,
CobbleStone,
Planks,
Sapling,
BedRock,
Water,
StationaryWater,
Lava,
StationaryLava,
Sand,
Gravel,
GoldOre,
IronOre,
CoalOre,
Wood,
Leaves,
Sponge,
Glass,
LapisLazuliOre,
LapisLazuliBlock,
Dispenser,
SandStone,
NoteBlock,
Bed,
PoweredRail,
DetectorRail,
StickyPison,
Web,
TallGrass,
DeadBush,
Piston,
PistonHead,
Wool,
PistonExtension,
Dandelion,
Rose,
BrownMushroom,
RedMushroom,
GoldBlock,
IronBlock,
DoubleSlabs,
Slabs,
BrickBlock,
Tnt,
BookShelf,
MossStone,
Obsidion,
Torch,
Fire,
MonsterSpawner,
OakwoodStairs,
Chest,
RedStoneWire,
DiamondOre,
DiamondBlock,
CraftingTable,
SeedsPlanted,
Farmland,
Furnace,
BurningFurnace,
SignPost,
WoodenDoor,
Ladders,
Rails,
CobbleStoneStairs,
WallSign,
Lever,
StonePresurePlate,
IronDoor,
WoodenPressurePlate,
RedStoneOre,
GlowingRedStoneOre,
RedStoneTorchoff,
RedStoneTorchon,
StoneButton,
Snow,
Ice,
SnowBlock,
Cactus,
ClayBlock,
SugarCane,
JukeBox,
Fence,
Pumpkin,
Netherrack,
SoulSand,
GlowStoneBlock,
Portal,
JackOLantern,
CakeBlock,
RedStoneRepeateroff,
RedStoneRepeateron,
StainedGlass,
Trapdoor,
MonsterEgg,
StoneBricks,
HugeBrownMushroom,
HugeRedMushroom,
IronBars,
GlassPane,
Melon,
PumpkinStem,
MelonStem,
Vines,
FenceGate,
BrickStairs,
StoneBrickStairs,
Mycelium,
LilyPad,
NetherBrickBlock,
NetherBrickFence,
NetherBrickStairs,
NetherWart,
EnchantmentTable,
BrewingStand,
Cauldron,
EndPortal,
EndPortalFrame,
EndStone,
DragonEgg,
RedstoneLampoff,
RedstoneLampon,
WoodenDoubleSlab,
WoodenSlab,
Cocoa,
SandstoneStairs,
EmeraldOre,
EnderChest,
TripwireHook,
Tripwire,
EmeraldBlock,
SpruceStairs,
BirchStairs,
JungleStairs,
CommandBlock,
Beacon,
CobblestoneWall,
FlowerPot,
Carrots,
Potatos,
WoodenButton,
MobHead,
Anvil,
TrappedChest,
LightweightPressurePlate,
HeavyPressurePlate,
RedstoneComparator,
Unused,
DaylightSensor,
BlockofRedstone,
NetherQuartzOre,
Hopper,
QuartzBlock,
QuartzStairs,
ActivatorRail,
Dropper,
StainedClay,
StainedGlassPane,
Leaves2,
Log,
AcaciaStairs,
DarkOakStairs,
HayBlock = 170,
Carpet,
HardenedClay,
CoalBlock,
PackedIce,
DoublePlant,
IronShovel = 256,
IronPickAxe,
IronAxe,
FlintAndSteel,
Apple,
Bow,
Arrow,
Coal,
Diamond,
IronIngot,
GoldIngot,
IronSword,
WoodenSword,
WoodenShovel,
WoodenPickAxe,
WoodenAxe,
StoneSword,
StoneShovel,
StonePickaxe,
StoneAxe,
DiamondSword,
DiamondShovel,
DiamondPickAxe,
DiamondAxe,
Stick,
Bowl,
MushroomSoup,
GoldSword,
GoldShovel,
GoldPickAxe,
GoldAxe,
StringItem,
Feather,
GunPowder,
WoodenHoe,
StoneHoe,
IronHoe,
DiamondHoe,
GoldHoe,
Seeds,
Wheat,
Bread,
LeatherCap,
LeatherTunic,
LeatherPants,
LeatherBoots,
ChainHelmet,
ChainChestplate,
ChainLeggings,
ChainBoots,
IronHelmet,
IronChestplate,
IronLeggings,
IronBoots,
DiamondHelmet,
DiamondChestplate,
DiamondLeggings,
DiamondBoots,
GoldHelmet,
GoldChestplate,
GoldLeggings,
GoldBoots,
Flint,
RawPorkchop,
CookedPorkchop,
Painting,
GoldenApple,
Sign,
WoodenDoorItem,
Bucket,
WaterBucket,
LavaBucket,
Minecart,
Saddle,
IronCoor,
RedStone,
SnowBall,
Boat,
Leather,
Milk,
ClayBrick,
Clay,
SugarCaneItem,
Paper,
Book,
SlimeBall,
StorageMinecart,
PoweredMinecart,
Egg,
Compass,
FishingRod,
Clock,
GlowStoneDust,
RawFish,
CookedFish,
Dye,
Bone,
Sugar,
Cake,
BedItem,
RedstoneRepeater,
Cookie,
Map,
Shears,
MelonSlice,
PumpkinSeeds,
MelonSeeds,
RawBeef,
Steak,
RawChicken,
CookedChicken,
RottenFlesh,
EnderPearl,
BlazeRod,
GhastTear,
GoldNugget,
NetherWartItem,
Potions,
GlassBottle,
SpiderEye,
FermentedSpiderEye,
BlazePowder,
MagmaCream,
BrewingStandItem,
CauldronItem,
EyeofEnder,
GlisteringMelon,
SpawnEgg,
EnchantingBottle,
FireCharge,
BookNQuill,
WrittenBook,
EmeraldItem,
ItemFrame,
FlowerPotItem,
Carrot,
Potato,
BakedPotato,
PoisonPotato,
EmptyMap,
GoldenCarrot,
MobHeadItem,
CarrotonStick,
NetherStar,
PumpkinPie,
FireworkRocket,
FireworkStar,
EnchantedBook,
RedstoneComparatorItem,
NetherBrick,
NetherQuartz,
TntCart,
HopperCart,
IronHorseArmor,
GoldHorseArmor,
DiamondHorseArmor,
Lead,
NameTag,
CommandBlockMinecart,
Disk13 = 2256,
CatDisk,
BlocksDisk,
ChirpDisk,
FarDisk,
MallDisk,
MellohiDisk,
StalDisk,
StradDisk,
WardDisk,
Disk11,
WaitDisk
}
}
}
| 22.300268 | 63 | 0.42943 | [
"BSD-3-Clause"
] | umby24/libMC.NET | Entities/Block.cs | 8,320 | C# |
using System;
using UnityEditor;
namespace Entitas.VisualDebugging.Unity.Editor {
public class CharTypeDrawer : ITypeDrawer {
public bool HandlesType(Type type) {
return type == typeof(char);
}
public object DrawAndGetNewValue(Type memberType, string memberName, object value, object target) {
var str = EditorGUILayout.TextField(memberName, ((char)value).ToString());
return str.Length > 0 ? str[0] : default(char);
}
}
}
| 27.944444 | 107 | 0.646123 | [
"MIT"
] | ADADebug/Entitas-CSharp | Addons/Entitas.VisualDebugging.Unity.Editor/Entitas.VisualDebugging.Unity.Editor/Entity/TypeDrawer/CharTypeDrawer.cs | 503 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ShadyNagy.DapperManager.Tests.Constants
{
internal static class DatabaseConstants
{
internal static string CONNECTION_STRING = "Database=TestDatabase;Server=.\\SQLEXPRESS;Trusted_Connection=true;";
}
}
| 24.285714 | 117 | 0.797059 | [
"MIT"
] | ShadyNagy/ShadyNagy.DapperManager | tests/ShadyNagy.DapperManager.Tests/Constants/DatabaseConstants.cs | 342 | C# |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.18444
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SampleSimpleStateMachineNETMF
{
internal partial class Resources
{
private static System.Resources.ResourceManager manager;
internal static System.Resources.ResourceManager ResourceManager
{
get
{
if ((Resources.manager == null))
{
Resources.manager = new System.Resources.ResourceManager("SampleSimpleStateMachineNETMF.Resources", typeof(Resources).Assembly);
}
return Resources.manager;
}
}
internal static Microsoft.SPOT.Font GetFont(Resources.FontResources id)
{
return ((Microsoft.SPOT.Font)(Microsoft.SPOT.ResourceUtility.GetObject(ResourceManager, id)));
}
[System.SerializableAttribute()]
internal enum FontResources : short
{
small = 13070,
}
}
}
| 34.615385 | 149 | 0.514815 | [
"BSD-3-Clause"
] | StaMa-StateMachine/StaMa | Samples/netmf/SampleSimpleStateMachineNETMF/Resources.Designer.cs | 1,350 | C# |
/*
* API v1
*
* DocSpring is a service that helps you fill out and sign PDF templates.
*
* OpenAPI spec version: v1
*
* Generated by: https://github.com/openapitools/openapi-generator.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using OpenAPIDateConverter = DocSpring.Client.Client.OpenAPIDateConverter;
namespace DocSpring.Client.Model
{
/// <summary>
/// ListSubmissionsResponse
/// </summary>
[DataContract]
public partial class ListSubmissionsResponse : IEquatable<ListSubmissionsResponse>, IValidatableObject
{
/// <summary>
/// Initializes a new instance of the <see cref="ListSubmissionsResponse" /> class.
/// </summary>
/// <param name="nextCursor">nextCursor.</param>
/// <param name="submissions">submissions.</param>
/// <param name="limit">limit.</param>
public ListSubmissionsResponse(string nextCursor = default(string), List<Submission> submissions = default(List<Submission>), decimal? limit = default(decimal?))
{
this.NextCursor = nextCursor;
this.Submissions = submissions;
this.Limit = limit;
}
/// <summary>
/// Gets or Sets NextCursor
/// </summary>
[DataMember(Name="next_cursor", EmitDefaultValue=false)]
public string NextCursor { get; set; }
/// <summary>
/// Gets or Sets Submissions
/// </summary>
[DataMember(Name="submissions", EmitDefaultValue=false)]
public List<Submission> Submissions { get; set; }
/// <summary>
/// Gets or Sets Limit
/// </summary>
[DataMember(Name="limit", EmitDefaultValue=false)]
public decimal? Limit { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class ListSubmissionsResponse {\n");
sb.Append(" NextCursor: ").Append(NextCursor).Append("\n");
sb.Append(" Submissions: ").Append(Submissions).Append("\n");
sb.Append(" Limit: ").Append(Limit).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public virtual string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="input">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object input)
{
return this.Equals(input as ListSubmissionsResponse);
}
/// <summary>
/// Returns true if ListSubmissionsResponse instances are equal
/// </summary>
/// <param name="input">Instance of ListSubmissionsResponse to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(ListSubmissionsResponse input)
{
if (input == null)
return false;
return
(
this.NextCursor == input.NextCursor ||
(this.NextCursor != null &&
this.NextCursor.Equals(input.NextCursor))
) &&
(
this.Submissions == input.Submissions ||
this.Submissions != null &&
this.Submissions.SequenceEqual(input.Submissions)
) &&
(
this.Limit == input.Limit ||
(this.Limit != null &&
this.Limit.Equals(input.Limit))
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
unchecked // Overflow is fine, just wrap
{
int hashCode = 41;
if (this.NextCursor != null)
hashCode = hashCode * 59 + this.NextCursor.GetHashCode();
if (this.Submissions != null)
hashCode = hashCode * 59 + this.Submissions.GetHashCode();
if (this.Limit != null)
hashCode = hashCode * 59 + this.Limit.GetHashCode();
return hashCode;
}
}
/// <summary>
/// To validate all properties of the instance
/// </summary>
/// <param name="validationContext">Validation context</param>
/// <returns>Validation Result</returns>
IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
{
yield break;
}
}
}
| 34.579618 | 169 | 0.565297 | [
"MIT"
] | DocSpring/docspring-csharp | src/DocSpring.Client/Model/ListSubmissionsResponse.cs | 5,429 | C# |
//
// System.Runtime.InteropServices.ArrayWithOffset.cs
//
// Author:
// Miguel de Icaza ([email protected])
//
// (C) Novell, Inc. http://www.ximian.com
//
//
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
namespace System.Runtime.InteropServices {
#if NET_2_0
[Serializable]
[ComVisible (true)]
#endif
public struct ArrayWithOffset {
object array;
int offset;
public ArrayWithOffset (object array, int offset)
{
this.array = array;
this.offset = offset;
}
public override bool Equals (object obj)
{
if (obj == null)
return false;
if (!(obj is ArrayWithOffset))
return false;
ArrayWithOffset other = (ArrayWithOffset) obj;
return (other.array == array && other.offset == offset);
}
public bool Equals (ArrayWithOffset obj)
{
return obj.array == array && obj.offset == offset;
}
#if NET_2_0
public static bool operator == (ArrayWithOffset a, ArrayWithOffset b)
{
return a.Equals (b);
}
public static bool operator != (ArrayWithOffset a, ArrayWithOffset b)
{
return !a.Equals (b);
}
#endif
public override int GetHashCode ()
{
return offset;
}
public object GetArray ()
{
return array;
}
public int GetOffset ()
{
return offset;
}
}
}
| 25.159574 | 73 | 0.702326 | [
"MIT"
] | zlxy/Genesis-3D | Engine/extlibs/IosLibs/mono-2.6.7/mcs/class/corlib/System.Runtime.InteropServices/ArrayWithOffset.cs | 2,365 | C# |
using Acr.UserDialogs;
using MeuCondominio.Services;
using System;
using Xamarin.Forms;
using Xamarin.Forms.Xaml;
namespace MeuCondominio
{
[XamlCompilation(XamlCompilationOptions.Compile)]
public partial class BoardPage : ContentPage
{
ViewModels.BoardViewModel vm;
public BoardPage ()
{
InitializeComponent();
vm = new ViewModels.BoardViewModel();
BindingContext = vm;
}
protected async override void OnAppearing()
{
using (UserDialogs.Instance.Loading("Carregando..."))
{
await vm.LoadMessages();
}
}
private async void btnSend_Clicked(object sender, System.EventArgs e)
{
if(txtMessage.Text.Trim() == "")
{
await App.Current.MainPage.DisplayAlert("Nada foi enviado", "Mensagem não pode estar em branco", "OK");
return;
}
var user = UserService.GetUser();
var msg = new Models.BoardMessage()
{
Content = txtMessage.Text,
DateTimeSent = DateTime.Now,
Sender = user.Name,
UserId = user.UserId
};
await BoardService.NewMessage(msg);
txtMessage.Text = "";
using (UserDialogs.Instance.Loading("Carregando..."))
{
await vm.LoadMessages();
}
}
private async void lvMessages_Refreshing(object sender, System.EventArgs e)
{
await vm.LoadMessages();
lvMessages.IsRefreshing = false;
}
}
} | 26.238095 | 119 | 0.547489 | [
"MIT"
] | idenardi/meucondominio | MeuCondominio/MeuCondominio/BoardPage.xaml.cs | 1,656 | C# |
using Common;
namespace Gamekit3D.Network
{
public partial class Incoming
{
private void OnRecvDie(IChannel channel, Message message)
{
SSpriteDie msg = message as SSpriteDie;
NetworkEntity target = networkEntities[msg.entityId];
target.behavior.Die();
}
}
}
| 22.2 | 65 | 0.615616 | [
"Apache-2.0"
] | Scottdyt/3DGamekitWeb | Frontend/Assets/3DGamekit/Scripts/Network/Incoming/OnRecvDie.cs | 335 | C# |
/*
* Module ID:<ExcelAddIn4>
* Function:<OutputContainer>
* Author:Peter.uhm ([email protected])
* Modify date:2016.04
* Modify date:2016.09
* Version : 2.0.0.2
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.Data.SqlClient;
using System.Configuration;
using System.Text.RegularExpressions;
using System.Reflection;
using System.Diagnostics;
using ExcelAddIn4.Common;
using System.IO;
using System.Xml;
namespace ExcelAddIn4
{
public partial class OutputContainer : UserControl
{
/// <summary>
///
/// </summary>
internal DataGridView dgvLD;
/// <summary>
///
/// </summary>
internal DataGridView dgvSaveOptions;
/// <summary>
///
/// </summary>
internal DataGridView dgvCreateTextFile;
/// <summary>
///
/// </summary>
internal static string filter;
/// <summary>
///
/// </summary>
internal List<Specialist> finallist = new List<Specialist>();
/// <summary>
///
/// </summary>
internal List<ExcelAddIn4.Common2.Specialist> TransUpdFinallist = new List<ExcelAddIn4.Common2.Specialist>();
/// <summary>
///
/// </summary>
internal List<RowCreateTextFile> finallistCTF = new List<RowCreateTextFile>();
/// <summary>
///
/// </summary>
internal static bool isTransUpdFlag = false;
/// <summary>
///
/// </summary>
internal static string searchStatus = string.Empty;
/// <summary>
///
/// </summary>
internal static string updateStatus = string.Empty;
/// <summary>
///
/// </summary>
internal static string updateStatusForPost = string.Empty;
/// <summary>
///
/// </summary>
//bool canChange = true;
/// <summary>
///
/// </summary>
internal List<KeyValuePair<int, string>> NameValueCollection;
/// <summary>
///
/// </summary>
internal List<KeyValuePair<int, string>> NameValueUpdate;
/// <summary>
///
/// </summary>
internal bool isFromBindingFlag = false;
/// <summary>
///
/// </summary>
internal static Finance_Tools ft
{
get { return new Finance_Tools(); }
}
/// <summary>
///
/// </summary>
internal static RSFinanceToolsEntities db
{
get { return new RSFinanceToolsEntities(); }
}
public bool bTab0HasLoad = false;
public bool bTab1HasLoad = false;
public bool bTab2HasLoad = false;
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
public void tabControl1_SelectedIndexChanged(object sender, EventArgs e)
{
if (this.tabControl1.SelectedIndex == 1 && bTab1HasLoad == false)
{
//DataTable dt = ft.GetReportsViaTemplatePath();
//if (dt.Rows.Count > 0) canChange = false;
//BindSaveOptions();
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, this.button2);
bTab1HasLoad = true;
}
else if (this.tabControl1.SelectedIndex == 0 && bTab0HasLoad == false)
{
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, this.button1);
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, this.btnTestJournal);
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, this.btnSetMax);
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, button3);
bTab0HasLoad = true;
}
else if (this.tabControl1.SelectedIndex == 2 && bTab2HasLoad == false)
{
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, this.btnTestCTF);
BasePage.VerifyWriteButton(SessionInfo.UserInfo.FileName, this.CTF_btnSave);
bTab2HasLoad = true;
}
}
/// <summary>
///
/// </summary>
public OutputContainer()
{
InitializeComponent();
BindLineDetailDGV();
BindSaveOptions();
}
/// <summary>
///
/// </summary>
/// <param name="dt"></param>
public void BindCreateTextFileDGV(DataTable dt)
{
try
{
dgvCreateTextFile = new DataGridView();
dgvCreateTextFile.Columns.Clear();
dgvCreateTextFile.Columns.Add("ReferenceNumber", "Ref");
dgvCreateTextFile.Columns["ReferenceNumber"].DataPropertyName = "ReferenceNumber";
dgvCreateTextFile.Columns.Add("LineIndicator", "Line Indicator");
dgvCreateTextFile.Columns["LineIndicator"].DataPropertyName = "LineIndicator";
dgvCreateTextFile.Columns.Add("StartinginCell", "StartinginCell");
dgvCreateTextFile.Columns["StartinginCell"].DataPropertyName = "StartinginCell";
if (!string.IsNullOrEmpty(dt.Rows[0]["TextFileName"].ToString()))
{
DataGridViewButtonColumn dgvb = new DataGridViewButtonColumn();
dgvb.HeaderText = "SavePath";
dgvb.Name = "SavePath";
dgvb.Text = "Browse...";
dgvb.DataPropertyName = "SavePath";
dgvCreateTextFile.Columns.Add(dgvb);
dgvCreateTextFile.Columns.Add("SaveName", "SaveName");
dgvCreateTextFile.Columns["SaveName"].DataPropertyName = "SaveName";
DataGridViewComboBoxColumn combox = new DataGridViewComboBoxColumn();
combox.HeaderText = "IncludeHeaderRow";
combox.Name = "IncludeHeaderRow";
combox.Items.Add("True");
combox.Items.Add("False");
combox.DataPropertyName = "IncludeHeaderRow";
combox.SortMode = DataGridViewColumnSortMode.NotSortable;
dgvCreateTextFile.Columns.Add(combox);
}
int count = 1;
for (int i = 0; i < dt.Rows.Count; i++)
{
if (bool.Parse(dt.Rows[i]["Visible"].ToString()) == true)
{
if (dgvCreateTextFile.Columns.Contains(dt.Rows[i]["Field"].ToString()))
{
dgvCreateTextFile.Columns.Add(dt.Rows[i]["Field"].ToString() + count, dt.Rows[i]["FriendlyName"].ToString());
dgvCreateTextFile.Columns[dt.Rows[i]["Field"].ToString() + count].DataPropertyName = "Column" + count;
dgvCreateTextFile.Columns[dt.Rows[i]["Field"].ToString() + count].Tag = dt.Rows[i]["DefaultValue"].ToString() + ",,," + dt.Rows[i]["Mandatory"].ToString() + ",,," + dt.Rows[i]["Separator"].ToString() + ",,," + dt.Rows[i]["TextLength"].ToString() + ",,," + dt.Rows[i]["Prefix"].ToString() + ",,," + dt.Rows[i]["Suffix"].ToString() + ",,," + dt.Rows[i]["RemoveCharacters"].ToString() + ",,," + dt.Rows[i]["Parent"].ToString() + ",,,";
}
else
{
dgvCreateTextFile.Columns.Add(dt.Rows[i]["Field"].ToString(), dt.Rows[i]["FriendlyName"].ToString());
dgvCreateTextFile.Columns[dt.Rows[i]["Field"].ToString()].DataPropertyName = "Column" + count;
dgvCreateTextFile.Columns[dt.Rows[i]["Field"].ToString()].Tag = dt.Rows[i]["DefaultValue"].ToString() + ",,," + dt.Rows[i]["Mandatory"].ToString() + ",,," + dt.Rows[i]["Separator"].ToString() + ",,," + dt.Rows[i]["TextLength"].ToString() + ",,," + dt.Rows[i]["Prefix"].ToString() + ",,," + dt.Rows[i]["Suffix"].ToString() + ",,," + dt.Rows[i]["RemoveCharacters"].ToString() + ",,," + dt.Rows[i]["Parent"].ToString() + ",,,";
}
count++;
}
}
dgvCreateTextFile.ColumnHeadersHeightSizeMode = DataGridViewColumnHeadersHeightSizeMode.EnableResizing;
dgvCreateTextFile.AutoGenerateColumns = false;
dgvCreateTextFile.ColumnHeadersHeight = 40;
dgvCreateTextFile.Dock = DockStyle.Fill;
for (int i = 0; i < dgvCreateTextFile.Columns.Count; i++)
dgvCreateTextFile.Columns[i].Width = 55;
dgvCreateTextFile.Columns["ReferenceNumber"].Width = 35;
dgvCreateTextFile.AllowUserToAddRows = true;
dgvCreateTextFile.CellDoubleClick += new DataGridViewCellEventHandler(dgvctf_CellMouseDoubleClick);
dgvCreateTextFile.CellClick += new DataGridViewCellEventHandler(dgvctf_CellMouseClick);
dgvCreateTextFile.RowPostPaint += new DataGridViewRowPostPaintEventHandler(dgvctf_RowPostPaint);
dgvCreateTextFile.CellFormatting += new DataGridViewCellFormattingEventHandler(dgvCreateTextFile_CellFormatting);
dgvCreateTextFile.NotifyCurrentCellDirty(false);
dgvCreateTextFile.DataError += new DataGridViewDataErrorEventHandler(dgvctf_DataError);
dgvCreateTextFile.EditMode = DataGridViewEditMode.EditOnKeystroke;
dgvCreateTextFile.KeyDown += new KeyEventHandler(dgvctf_KeyDown);
dgvCreateTextFile.CellContentClick += new DataGridViewCellEventHandler(dgvCreateTextFile_CellContentClick);
((System.ComponentModel.ISupportInitialize)(this.dgvCreateTextFile)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.dataGridViewColumnHeaderEditor1)).BeginInit();
this.dataGridViewColumnHeaderEditor1.TargetControl = this.dgvCreateTextFile;
BindDataCTF();
this.panel17.AutoSize = true;
this.panel17.Controls.Clear();
this.panel17.Controls.Add(dgvCreateTextFile);
dgvCreateTextFile.RowHeadersWidth = 55;
((System.ComponentModel.ISupportInitialize)(this.dgvCreateTextFile)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.dataGridViewColumnHeaderEditor1)).EndInit();
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, " From Create Text File Tab, Output settings Error");
LogHelper.WriteLog(typeof(OutputContainer), ex.Message + " From Create Text File Tab, Output settings Error");
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvCreateTextFile_CellContentClick(object sender, DataGridViewCellEventArgs e)
{
if ((e.ColumnIndex == 3) && (e.RowIndex != -1) && cbXMLOrText.Text != "XML")
{
DialogResult drctf = fbdAd_UpdateFolder.ShowDialog();
if (drctf == DialogResult.OK)
{
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = fbdAd_UpdateFolder.SelectedPath;
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].ToolTipText = fbdAd_UpdateFolder.SelectedPath;
}
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvCreateTextFile_CellFormatting(object sender, DataGridViewCellFormattingEventArgs e)
{
if (e.ColumnIndex == 0)
dgvCreateTextFile.Rows[e.RowIndex].Cells[e.ColumnIndex].Style.BackColor = dgvCreateTextFile.RowHeadersDefaultCellStyle.BackColor;
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvctf_DataError(object sender, DataGridViewDataErrorEventArgs e) { }
/// <summary>
///
/// </summary>
private void BindDataCTF()
{
DataTable dt = new DataTable();
if (cbXMLOrText.Text == "XML")
dt = ft.GetCreateTextFileDataFromDB(cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(",")), cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1));
else
dt = ft.GetCreateTextFileDataFromDB(cbItems.Text);
if (dt.Rows.Count > 0)
{
List<RowCreateTextFile> list = GetCTFDataList(dt);
for (int k = 0; k < list.Count; k++)
dgvCreateTextFile.Rows.Add(new DataGridViewRow());
for (int j = 0; j < dt.Rows.Count; j++)
for (int i = 0; i < dgvCreateTextFile.Columns.Count; i++)
dgvCreateTextFile.Rows[j].Cells[i].Value = DataConversionTools.GetPropertyValue(dgvCreateTextFile.Columns[i].DataPropertyName, list[j]);//initialize operation data in dgvCreateTextFile dgv
}
if (cbXMLOrText.Text == "XML")
for (int i = 0; i < dgvCreateTextFile.Columns.Count; i++)
{
string type = ft.GetSectionFromDB(cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(",")), cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1), dgvCreateTextFile.Columns[i].Name, dgvCreateTextFile.Columns[i].HeaderText);
if (type == "Header")
dgvCreateTextFile.Columns[i].DefaultCellStyle.BackColor = System.Drawing.Color.Ivory;
else if (type == "Footer")
dgvCreateTextFile.Columns[i].DefaultCellStyle.BackColor = System.Drawing.Color.Beige;
}
this.panel16.AutoSize = true;
this.panel17.Controls.Add(dgvCreateTextFile);
}
/// <summary>
///
/// </summary>
/// <param name="dt"></param>
/// <returns></returns>
private List<RowCreateTextFile> GetCTFDataList(DataTable dt)
{
List<RowCreateTextFile> list = new List<RowCreateTextFile>();
for (int i = 0; i < dt.Rows.Count; i++)
{
RowCreateTextFile ctfRow = new RowCreateTextFile();
for (int j = 0; j < dt.Columns.Count; j++)
DataConversionTools.SetPropertyValue(dt.Columns[j].ColumnName, dt.Rows[i][j].ToString(), ref ctfRow); //initialize ctfRow
list.Add(ctfRow);
}
return list;
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void dgvctf_KeyDown(object sender, KeyEventArgs e)
{
ft.EditingControlWantsInputKey(e.KeyCode, dgvCreateTextFile);
}
/// <summary>
/// Generate datarow number before the Rows
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvctf_RowPostPaint(object sender, DataGridViewRowPostPaintEventArgs e)
{
System.Drawing.Rectangle rectangle = new System.Drawing.Rectangle(e.RowBounds.Location.X,
e.RowBounds.Location.Y,
dgvCreateTextFile.RowHeadersWidth - 4,
e.RowBounds.Height);
TextRenderer.DrawText(e.Graphics, (e.RowIndex + 1).ToString(),
dgvCreateTextFile.RowHeadersDefaultCellStyle.Font,
rectangle,
dgvCreateTextFile.RowHeadersDefaultCellStyle.ForeColor,
TextFormatFlags.VerticalCenter | TextFormatFlags.Right);
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvctf_CellMouseClick(object sender, DataGridViewCellEventArgs e)
{
try
{
if (e.ColumnIndex == 0 || (((e.ColumnIndex == 5) || (e.ColumnIndex == 3)) && cbXMLOrText.Text != "XML"))
return;
if (dgvCreateTextFile[0, e.RowIndex].Value == "" || dgvCreateTextFile[0, e.RowIndex].Value == null) dgvCreateTextFile[0, e.RowIndex].Value = e.RowIndex + 1;
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = "";
var xlRange = Globals.ThisAddIn.Application.ActiveCell.Address;
if (xlRange != null)
{
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = xlRange.Replace("$", "");
dgvCreateTextFile.Focus();
}
}
catch { }
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvctf_CellMouseDoubleClick(object sender, DataGridViewCellEventArgs e)
{
try
{
if (e.ColumnIndex == 0 || (((e.ColumnIndex == 5) || (e.ColumnIndex == 3)) && cbXMLOrText.Text != "XML"))
return;
if (dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value != null)
{
if (dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value.ToString().Contains("$"))
{
string KeyWord = dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value.ToString().Replace("$", "");
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = "";
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = KeyWord;
}
else
{
string KeyWord = dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value.ToString();
string res = Regex.Replace(KeyWord, @"(\d+)|(\s+) ", " $1 $2 ", RegexOptions.Compiled | RegexOptions.IgnoreCase);
KeyWord = "$" + res.Trim().Replace(" ", "$");
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = "";
dgvCreateTextFile[e.ColumnIndex, e.RowIndex].Value = KeyWord;
}
dgvCreateTextFile.EndEdit();
}
}
catch { }
}
/// <summary>
///
/// </summary>
//private void BindPDFViewer()
//{
// try
// {
// SessionInfo.UserInfo.Containerpath = (from FT_sett in db.rsTemplateContainers
// where FT_sett.TemplateID == SessionInfo.UserInfo.File_ftid
// select FT_sett.ft_relatefilepath).First();
// string column = (from FT_sett in db.rsTemplateContainers
// where FT_sett.TemplateID == SessionInfo.UserInfo.File_ftid
// select FT_sett.column).First();
// bool? viewFromDB = (from FT_sett in db.rsTemplateContainers
// where FT_sett.TemplateID == SessionInfo.UserInfo.File_ftid
// select FT_sett.FromDB).First();
// }
// catch { }
//}
/// <summary>
///
/// </summary>
public void BindSaveOptions()
{
try
{
dgvSaveOptions = ft.IniSaveOptionsGrd();
dgvSaveOptions.AllowUserToAddRows = true;
dgvSaveOptions.CellDoubleClick += new DataGridViewCellEventHandler(dgvSaveOptions_CellMouseDoubleClick);
dgvSaveOptions.CellClick += new DataGridViewCellEventHandler(dgvSaveOptions_CellMouseClick);
dgvSaveOptions.RowPostPaint += new DataGridViewRowPostPaintEventHandler(dgvSaveOptions_RowPostPaint);
dgvSaveOptions.CellFormatting += new DataGridViewCellFormattingEventHandler(dgvSaveOptions_CellFormatting);
dgvSaveOptions.NotifyCurrentCellDirty(false);
dgvSaveOptions.EditMode = DataGridViewEditMode.EditOnKeystroke;
dgvSaveOptions.KeyDown += new KeyEventHandler(dgvSaveOptions_KeyDown);
BindCriterias();
this.panel7.AutoSize = true;
this.panel7.Controls.Clear();
this.panel7.Controls.Add(dgvSaveOptions);
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, " From SaveOptions Tab, Output settings Error");
LogHelper.WriteLog(typeof(OutputContainer), ex.Message + " From SaveOptions Tab, Output settings Error");
}
}
/// <summary>
///
/// </summary>
public void BindCriterias()
{
DataTable dt = ft.GetReportCriteria(SessionInfo.UserInfo.File_ftid);
for (int k = 0; k < dt.Rows.Count; k++)
dgvSaveOptions.Rows.Add(new DataGridViewRow());
for (int j = 0; j < dt.Rows.Count; j++)
for (int i = 0; i < dgvSaveOptions.Columns.Count; i++)
{
dgvSaveOptions.Rows[j].Cells[i].Value = dt.Rows[j][dgvSaveOptions.Columns[i].DataPropertyName].ToString();
if (!string.IsNullOrEmpty(dgvSaveOptions.Rows[j].Cells[i].Value.ToString()))
{
if ((i != dgvSaveOptions.Columns.Count - 5))//!canChange &&
{
dgvSaveOptions.Rows[j].Cells[i].ReadOnly = true;
dgvSaveOptions.Rows[j].Cells[i].Style.BackColor = System.Drawing.Color.LightGray;
}
}
}
dgvSaveOptions.Columns[0].ReadOnly = true;
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvSaveOptions_CellFormatting(object sender, DataGridViewCellFormattingEventArgs e)
{
if (e.ColumnIndex == 0)
dgvSaveOptions.Rows[e.RowIndex].Cells[e.ColumnIndex].Style.BackColor = dgvSaveOptions.RowHeadersDefaultCellStyle.BackColor;
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void dgvSaveOptions_KeyDown(object sender, KeyEventArgs e)
{
if (dgvSaveOptions.CurrentCell.Style.BackColor == System.Drawing.Color.LightGray) return;
if (dgvSaveOptions.CurrentCell.ColumnIndex == 0) return;
ft.EditingControlWantsInputKey(e.KeyCode, dgvSaveOptions);
}
/// <summary>
/// Generate datarow number before the Rows
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvSaveOptions_RowPostPaint(object sender, DataGridViewRowPostPaintEventArgs e)
{
System.Drawing.Rectangle rectangle = new System.Drawing.Rectangle(e.RowBounds.Location.X,
e.RowBounds.Location.Y,
dgvSaveOptions.RowHeadersWidth - 4,
e.RowBounds.Height);
TextRenderer.DrawText(e.Graphics, (e.RowIndex + 1).ToString(),
dgvSaveOptions.RowHeadersDefaultCellStyle.Font,
rectangle,
dgvSaveOptions.RowHeadersDefaultCellStyle.ForeColor,
TextFormatFlags.VerticalCenter | TextFormatFlags.Right);
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvSaveOptions_CellMouseClick(object sender, DataGridViewCellEventArgs e)
{
try
{
if (e.ColumnIndex == 0 || e.ColumnIndex == 1 || e.ColumnIndex == 3 || e.ColumnIndex == 5 || e.ColumnIndex == 7 || e.ColumnIndex == 9 || e.ColumnIndex == 11)
return;
if (dgvSaveOptions.Rows[e.RowIndex].Cells[e.ColumnIndex].Style.BackColor == System.Drawing.Color.LightGray) return;
if (dgvSaveOptions[0, e.RowIndex].Value == "" || dgvSaveOptions[0, e.RowIndex].Value == null) dgvSaveOptions[0, e.RowIndex].Value = e.RowIndex + 1;
dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value = "";
var xlRange = Globals.ThisAddIn.Application.ActiveCell.Address;
if (xlRange != null)
{
dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value = xlRange.Replace("$", "");
dgvSaveOptions.Focus();
}
}
catch { }
}
        /// <summary>
        /// Double click toggles a cell's address text between its plain form
        /// and a '$'-anchored form (e.g. "A1" &lt;-&gt; "$A$1").
        /// Label columns and locked (grey) cells are ignored.
        /// </summary>
        /// <param name="sender">The save-options grid.</param>
        /// <param name="e">Identifies the double-clicked cell.</param>
        private void dgvSaveOptions_CellMouseDoubleClick(object sender, DataGridViewCellEventArgs e)
        {
            try
            {
                if (e.ColumnIndex == 0 || e.ColumnIndex == 1 || e.ColumnIndex == 3 || e.ColumnIndex == 5 || e.ColumnIndex == 7 || e.ColumnIndex == 9 || e.ColumnIndex == 11)
                    return;
                if (dgvSaveOptions.Rows[e.RowIndex].Cells[e.ColumnIndex].Style.BackColor == System.Drawing.Color.LightGray) return;
                if (dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value != null)
                {
                    if (dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value.ToString().Contains("$"))
                    {
                        // Already anchored: strip all '$' back to the plain address.
                        string KeyWord = dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value.ToString().Replace("$", "");
                        dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value = "";
                        dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value = KeyWord;
                    }
                    else
                    {
                        // Split letters from digit runs, then join with '$'.
                        // NOTE(review): the pattern's trailing space and the
                        // "(\s+)" alternative look suspicious for plain "A1"
                        // style input — confirm intended behaviour for
                        // multi-part addresses before changing.
                        string KeyWord = dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value.ToString();
                        string res = Regex.Replace(KeyWord, @"(\d+)|(\s+) ", " $1 $2 ", RegexOptions.Compiled | RegexOptions.IgnoreCase);
                        KeyWord = "$" + res.Trim().Replace(" ", "$");
                        dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value = "";
                        dgvSaveOptions[e.ColumnIndex, e.RowIndex].Value = KeyWord;
                    }
                    dgvSaveOptions.EndEdit();
                }
            }
            catch { }
        }
/// <summary>
///
/// </summary>
private void BindLineDetailDGV()
{
try
{
dgvLD = ft.IniGrd();
dgvLD.AllowUserToAddRows = true;
dgvLD.CellDoubleClick += new DataGridViewCellEventHandler(dgv_CellMouseDoubleClick);
dgvLD.CellClick += new DataGridViewCellEventHandler(dgv_CellMouseClick);
dgvLD.CellMouseDown += new DataGridViewCellMouseEventHandler(dgvLD_CellMouseDown);
dgvLD.RowPostPaint += new DataGridViewRowPostPaintEventHandler(dgv_RowPostPaint);
dgvLD.CellFormatting += new DataGridViewCellFormattingEventHandler(dgvLD_CellFormatting);
dgvLD.CellValueChanged += new DataGridViewCellEventHandler(dgvLD_CellValueChanged);
dgvLD.NotifyCurrentCellDirty(false);
dgvLD.EditMode = DataGridViewEditMode.EditOnKeystroke;
dgvLD.KeyDown += new KeyEventHandler(dgvLD_KeyDown);
dgvLD.DataBindingComplete += new DataGridViewBindingCompleteEventHandler(dgvLD_DataBindingComplete);
BindData();
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, " From Journal Tab, Output settings Error");
LogHelper.WriteLog(typeof(OutputContainer), ex.Message + " From Journal Tab, Output settings Error");
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvLD_CellValueChanged(object sender, DataGridViewCellEventArgs e)
{
if (e.ColumnIndex == 1)
{
DataTable dt = ft.GetReportCriteriaByRef(SessionInfo.UserInfo.File_ftid, dgvLD.Rows[e.RowIndex].Cells[e.ColumnIndex].FormattedValue.ToString());
if (dt.Rows.Count == 0)
dgvLD.Rows[e.RowIndex].Cells[e.ColumnIndex].ErrorText = "This save reference doesn't exist in Save Options!";
else
dgvLD.Rows[e.RowIndex].Cells[e.ColumnIndex].ErrorText = "";
}
if (e.ColumnIndex == 0)
{
if ((dgvLD.Rows[e.RowIndex].Cells[0].Value.ToString().Trim() != "") && ((dgvLD.Rows[e.RowIndex].Cells[1].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[2].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[3].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[4].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[5].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[6].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[7].Value.ToString().Trim() == "" && dgvLD.Rows[e.RowIndex].Cells[8].Value.ToString().Trim() == "") || (e.RowIndex > bindCount - 1)))
for (int i = 0; i < dgvLD.Rows.Count; i++)
if ((dgvLD.Rows[i].Cells[0].Value != null) && (dgvLD.Rows[i].Cells[0].Value.ToString().Trim() == dgvLD.Rows[e.RowIndex].Cells[0].Value.ToString().Trim()))
{
dgvLD.Rows[e.RowIndex].Cells[1].Value = dgvLD.Rows[i].Cells[1].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[2].Value = dgvLD.Rows[i].Cells[2].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[3].Value = dgvLD.Rows[i].Cells[3].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[4].Value = dgvLD.Rows[i].Cells[4].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[5].Value = dgvLD.Rows[i].Cells[5].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[6].Value = dgvLD.Rows[i].Cells[6].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[7].Value = dgvLD.Rows[i].Cells[7].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[8].Value = dgvLD.Rows[i].Cells[8].Value.ToString();
dgvLD.Rows[e.RowIndex].Cells[9].Value = dgvLD.Rows[i].Cells[9].Value.ToString();
break;
}
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgvLD_CellFormatting(object sender, DataGridViewCellFormattingEventArgs e)
{
if (e.ColumnIndex == 0)
dgvLD.Rows[e.RowIndex].Cells[e.ColumnIndex].Style.BackColor = dgvLD.RowHeadersDefaultCellStyle.BackColor;
}
        /// <summary>
        /// After (re)binding: re-paints the consolidation (yellow) and update
        /// (aqua) cell markers recorded in NameValueCollection/NameValueUpdate,
        /// and attaches a tooltip to database-sourced reference cells that are
        /// referenced by existing template actions.
        /// </summary>
        /// <param name="sender">The line-detail grid (null when invoked directly).</param>
        /// <param name="e">Binding event data (unused; may be null).</param>
        private void dgvLD_DataBindingComplete(object sender, DataGridViewBindingCompleteEventArgs e)
        {
            try
            {
                if (isFromBindingFlag)//set consolidation cell yellow in each row
                {
                    foreach (KeyValuePair<int, string> a in NameValueCollection)
                    {
                        if (!string.IsNullOrEmpty(a.Value) && (a.Key >= 0))
                            this.dgvLD.Rows[a.Key].Cells[a.Value].Style.BackColor = Color.Yellow;
                    }
                    // Aqua marks cells selected for transaction updates.
                    foreach (KeyValuePair<int, string> a in NameValueUpdate)
                    {
                        if (!string.IsNullOrEmpty(a.Value) && (a.Key >= 0))
                            this.dgvLD.Rows[a.Key].Cells[a.Value].Style.BackColor = Color.Aqua;
                    }
                }
                // Only rows loaded from the database (index < bindCount) can be
                // referenced by template actions.
                for (int i = 0; i < dgvLD.Rows.Count; i++)
                    if (dgvLD.Rows[i].Cells[0].Value != null && i < bindCount)
                    {
                        DataTable dt = ft.GetTemplateActionByRef(SessionInfo.UserInfo.File_ftid, dgvLD.Rows[i].Cells[0].Value.ToString().Trim());
                        if (dt.Rows.Count > 0)
                        {
                            dgvLD.Rows[i].Cells[0].ToolTipText = "Can't be changed! Some Action(s) are using this process reference.";
                        }
                    }
            }
            catch { } // cosmetic repaint only; never let it crash the grid
        }
        /// <summary>
        /// Column index of the cell that was last right-clicked (context-menu target).
        /// </summary>
        private int currentColumnIndex = 0;
        /// <summary>
        /// Row index of the cell that was last right-clicked (context-menu target).
        /// </summary>
        private int currentRowIndex = 0;
        /// <summary>
        /// SHOW CONSOLIDATION/UNSOLIDATION MENUSTRIP WHen right click.
        /// Right-click dispatcher: columns &gt;= 10 get the consolidate/update
        /// menu; column 0 gets the row copy/paste/insert/remove menu.
        /// </summary>
        /// <param name="sender">The line-detail grid.</param>
        /// <param name="e">Mouse event data identifying the clicked cell.</param>
        private void dgvLD_CellMouseDown(object sender, DataGridViewCellMouseEventArgs e)
        {
            try
            {
                if (e.Button == MouseButtons.Right)
                {
                    // Remember the target cell for the menu-item handlers.
                    currentColumnIndex = e.ColumnIndex;
                    currentRowIndex = e.RowIndex;
                    if (e.ColumnIndex >= 10)
                    {
                        setMenu(false);
                        if (dgvLD.Columns[e.ColumnIndex].Selected == false)
                        {
                            dgvLD.ClearSelection();
                            dgvLD.Columns[e.ColumnIndex].Selected = true;
                            // "Update" is only offered for columns that map onto
                            // a property of the Actions class.
                            if (DataConversionTools.IsPropertyInClassProperties<Common2.Actions>(dgvLD.Columns[currentColumnIndex].Name))
                                updateToolStripMenuItem.Visible = true;
                            else
                                updateToolStripMenuItem.Visible = false;
                        }
                        this.contextMenuStrip1.Show(MousePosition.X, MousePosition.Y);
                    }
                    else if (e.ColumnIndex == 0)
                    {
                        setMenu(true);
                        // Paste is only enabled once a row has been copied
                        // (copyrow == -2 means "nothing copied yet").
                        if (copyrow == -2) PasteStripMenuItem.Enabled = false;
                        else PasteStripMenuItem.Enabled = true;
                        this.contextMenuStrip1.Show(MousePosition.X, MousePosition.Y);
                    }
                }
            }
            catch { }
        }
/// <summary>
///
/// </summary>
/// <param name="b"></param>
private void setMenu(bool b)
{
if (b)
{
toolStripMenuItem1.Visible = false;
updateToolStripMenuItem.Visible = false;
CopyStripMenuItem.Visible = true;
PasteStripMenuItem.Visible = true;
InsertStripMenuItem.Visible = true;
RemoveStripMenuItem.Visible = true;
}
else
{
toolStripMenuItem1.Visible = true;
updateToolStripMenuItem.Visible = true;
CopyStripMenuItem.Visible = false;
PasteStripMenuItem.Visible = false;
InsertStripMenuItem.Visible = false;
RemoveStripMenuItem.Visible = false;
}
}
        /// <summary>
        /// Consolidate menu item: toggles the yellow "consolidate" marker on the
        /// right-clicked cell. A row may carry at most four consolidate markers
        /// and must not mix consolidate (yellow) and update (aqua) markers.
        /// </summary>
        /// <param name="sender">The consolidate menu item.</param>
        /// <param name="e">Click event data (unused).</param>
        private void toolStripMenuItem1_Click(object sender, EventArgs e)
        {
            isFromBindingFlag = false;
            if (currentColumnIndex >= 0 && currentRowIndex >= 0 && (dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor != Color.Yellow))
            {
                int consCount = 0;
                int updateCount = 0;
                for (int j = 0; j < dgvLD.Columns.Count; j++)
                {
                    if (dgvLD.Rows[currentRowIndex].Cells[j].Style.BackColor == Color.Yellow)
                    {
                        consCount++;
                        // Four markers already present: refuse a fifth.
                        if (consCount == 4)
                        {
                            MessageBox.Show("Consolidate cells can't exceed 4 cells per row ! - Data error in journal tab, output settings !", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                            LogHelper.WriteLog(typeof(OutputContainer), "Consolidate cells can't exceed 4 cells per row ! - Data error in journal tab, output settings !");
                            return;
                        }
                    }
                    if (dgvLD.Rows[currentRowIndex].Cells[j].Style.BackColor == Color.Aqua)
                    {
                        updateCount++;
                        // Any update marker in this row blocks consolidation.
                        if (updateCount == 1)
                        {
                            MessageBox.Show("Update cells and consolidate cells should in different rows ! - Data error in journal tab, output settings !", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                            LogHelper.WriteLog(typeof(OutputContainer), "Update cells and consolidate cells should in different rows ! - Data error in journal tab, output settings !");
                            return;
                        }
                    }
                }
                dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor = Color.Yellow;
                NameValueCollection.Add(new KeyValuePair<int, string>(currentRowIndex, dgvLD.Columns[currentColumnIndex].Name));
            }
            else if (currentColumnIndex >= 0 && currentRowIndex >= 0 && (dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor == Color.Yellow))
            {
                // Re-clicking a marked cell clears the marker.
                dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor = Color.Empty;
                NameValueCollection.Remove(new KeyValuePair<int, string>(currentRowIndex, dgvLD.Columns[currentColumnIndex].Name));
            }
        }
        /// <summary>
        /// Update menu item: toggles the aqua "update" marker on the
        /// right-clicked cell. Update (aqua) and consolidate (yellow) markers
        /// may not appear in the same row.
        /// </summary>
        /// <param name="sender">The update menu item.</param>
        /// <param name="e">Click event data (unused).</param>
        private void updateToolStripMenuItem_Click(object sender, EventArgs e)
        {
            isFromBindingFlag = false;
            if (currentColumnIndex >= 0 && currentRowIndex >= 0 && (dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor != Color.Aqua))
            {
                int consCount = 0;
                for (int j = 0; j < dgvLD.Columns.Count; j++)
                {
                    if (dgvLD.Rows[currentRowIndex].Cells[j].Style.BackColor == Color.Yellow)
                    {
                        consCount++;
                        // Any consolidate marker in this row blocks updates.
                        if (consCount == 1)
                        {
                            MessageBox.Show("Update cells and consolidate cells should in different rows ! - Data error in journal tab, output settings !", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                            LogHelper.WriteLog(typeof(OutputContainer), "Update cells and consolidate cells should in different rows ! - Data error in journal tab, output settings !");
                            return;
                        }
                    }
                }
                dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor = Color.Aqua;
                NameValueUpdate.Add(new KeyValuePair<int, string>(currentRowIndex, dgvLD.Columns[currentColumnIndex].Name));
            }
            else if (currentColumnIndex >= 0 && currentRowIndex >= 0 && (dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor == Color.Aqua))
            {
                // Re-clicking a marked cell clears the marker.
                dgvLD.Rows[currentRowIndex].Cells[currentColumnIndex].Style.BackColor = Color.Empty;
                NameValueUpdate.Remove(new KeyValuePair<int, string>(currentRowIndex, dgvLD.Columns[currentColumnIndex].Name));
            }
        }
        int copyrow = -2; // row captured by the Copy menu item; -2 = nothing copied yet
        /// <summary>
        /// Remembers the right-clicked row as the source for a later Paste.
        /// </summary>
        /// <param name="sender">The Copy menu item.</param>
        /// <param name="e">Click event data (unused).</param>
        private void CopyStripMenuItem_Click(object sender, EventArgs e)
        {
            copyrow = currentRowIndex;
        }
/// <summary>
///
/// </summary>
private void reduceBinding()
{
dgvLD.DataBindingComplete -= new DataGridViewBindingCompleteEventHandler(dgvLD_DataBindingComplete);
dgvLD.CellValueChanged -= new DataGridViewCellEventHandler(dgvLD_CellValueChanged);
dgvLD.RowPostPaint -= new DataGridViewRowPostPaintEventHandler(dgv_RowPostPaint);
dgvLD.CellFormatting -= new DataGridViewCellFormattingEventHandler(dgvLD_CellFormatting);
}
/// <summary>
///
/// </summary>
private void plusBinding()
{
dgvLD.DataBindingComplete += new DataGridViewBindingCompleteEventHandler(dgvLD_DataBindingComplete);
dgvLD.CellValueChanged += new DataGridViewCellEventHandler(dgvLD_CellValueChanged);
dgvLD.RowPostPaint += new DataGridViewRowPostPaintEventHandler(dgv_RowPostPaint);
dgvLD.CellFormatting += new DataGridViewCellFormattingEventHandler(dgvLD_CellFormatting);
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void PasteStripMenuItem_Click(object sender, EventArgs e)
{
try
{
reduceBinding();
for (int i = 0; i < dgvLD.Columns.Count; i++)
if (dgvLD[i, currentRowIndex].Visible)
dgvLD[i, currentRowIndex].Value = dgvLD[i, copyrow].Value;
}
catch { }
finally
{
plusBinding();
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void InsertStripMenuItem_Click(object sender, EventArgs e)
{
reduceBinding();
DataTable dt = (DataTable)this.dgvLD.DataSource;
DataRow dr = dt.NewRow();
dt.Rows.InsertAt(dr, currentRowIndex);
this.dgvLD.DataSource = dt;
bindCount = dt.Rows.Count;
plusBinding();
ChangeKeyValuePair(currentRowIndex, 1);
if (copyrow != -2 && copyrow >= currentRowIndex) copyrow++;
}
/// <summary>
///
/// </summary>
/// <param name="currentRowIndex"></param>
/// <param name="number"></param>
private void ChangeKeyValuePair(int currentRowIndex, int number)
{
List<KeyValuePair<int, string>> templist = new List<KeyValuePair<int, string>>();
foreach (KeyValuePair<int, string> a in NameValueCollection)
{
if ((a.Key > currentRowIndex) || ((a.Key == currentRowIndex) && number > 0))
{
if (!string.IsNullOrEmpty(a.Value))
{
this.dgvLD.Rows[a.Key].Cells[a.Value].Style.BackColor = Color.Empty;
int key = a.Key + number;
string value = a.Value;
templist.Add(new KeyValuePair<int, string>(key, value));
}
}
else if ((a.Key == currentRowIndex) && number < 0)
{ }
else
templist.Add(a);
}
NameValueCollection = templist;
templist = new List<KeyValuePair<int, string>>();
foreach (KeyValuePair<int, string> a in NameValueUpdate)
{
if ((a.Key > currentRowIndex) || ((a.Key == currentRowIndex) && number > 0))
{
if (!string.IsNullOrEmpty(a.Value))
{
this.dgvLD.Rows[a.Key].Cells[a.Value].Style.BackColor = Color.Empty;
int key = a.Key + number;
string value = a.Value;
templist.Add(new KeyValuePair<int, string>(key, value));
}
}
else if ((a.Key == currentRowIndex) && number < 0)
{ }
else
templist.Add(a);
}
NameValueUpdate = templist;
isFromBindingFlag = true;
dgvLD_DataBindingComplete(null, null);
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void RemoveStripMenuItem_Click(object sender, EventArgs e)
{
try
{
reduceBinding();
DataTable dt = (DataTable)this.dgvLD.DataSource;
dt.Rows.RemoveAt(currentRowIndex);
this.dgvLD.DataSource = dt;
bindCount = dt.Rows.Count;
plusBinding();
ChangeKeyValuePair(currentRowIndex, -1);
if (copyrow != -2 && copyrow > currentRowIndex)
copyrow--;
else if (copyrow == currentRowIndex)
copyrow = -2;
}
catch { }
}
        /// <summary>
        /// Forwards key presses to the shared grid editing helper.
        /// </summary>
        /// <param name="sender">The line-detail grid.</param>
        /// <param name="e">Key event data.</param>
        void dgvLD_KeyDown(object sender, KeyEventArgs e)
        {
            ft.EditingControlWantsInputKey(e.KeyCode, dgvLD);
        }
/// <summary>
/// Generate datarow number before the Rows
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgv_RowPostPaint(object sender, DataGridViewRowPostPaintEventArgs e)
{
System.Drawing.Rectangle rectangle = new System.Drawing.Rectangle(e.RowBounds.Location.X,
e.RowBounds.Location.Y,
dgvLD.RowHeadersWidth - 4,
e.RowBounds.Height);
TextRenderer.DrawText(e.Graphics, (e.RowIndex + 1).ToString(),
dgvLD.RowHeadersDefaultCellStyle.Font,
rectangle,
dgvLD.RowHeadersDefaultCellStyle.ForeColor,
TextFormatFlags.VerticalCenter | TextFormatFlags.Right);
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void dgv_CellMouseClick(object sender, DataGridViewCellEventArgs e)
{
try
{
if (e.ColumnIndex == 0 || (e.ColumnIndex == 2) || (e.ColumnIndex == 3) || (e.ColumnIndex == 4))
return;
if (dgvLD[0, e.RowIndex].Value == "" || dgvLD[0, e.RowIndex].Value == null || dgvLD[0, e.RowIndex].Value.ToString() == "") dgvLD[0, e.RowIndex].Value = e.RowIndex + 1;
dgvLD[e.ColumnIndex, e.RowIndex].Value = "";
var xlRange = Globals.ThisAddIn.Application.ActiveCell.Address;
if (xlRange != null)
{
dgvLD[e.ColumnIndex, e.RowIndex].Value = xlRange.Replace("$", "");
dgvLD.Focus();
}
}
catch { }
}
        /// <summary>
        /// Double click toggles a cell's address text between its plain form
        /// and a '$'-anchored form (e.g. "A1" &lt;-&gt; "$A$1").
        /// Columns 0 and 2-4 are ignored.
        /// </summary>
        /// <param name="sender">The line-detail grid.</param>
        /// <param name="e">Identifies the double-clicked cell.</param>
        private void dgv_CellMouseDoubleClick(object sender, DataGridViewCellEventArgs e)
        {
            if (e.ColumnIndex == 0 || (e.ColumnIndex == 2) || (e.ColumnIndex == 3) || (e.ColumnIndex == 4))
                return;
            try
            {
                if (dgvLD[e.ColumnIndex, e.RowIndex].Value != null)
                {
                    if (dgvLD[e.ColumnIndex, e.RowIndex].Value.ToString().Contains("$"))
                    {
                        // Already anchored: strip all '$' back to the plain address.
                        string KeyWord = dgvLD[e.ColumnIndex, e.RowIndex].Value.ToString().Replace("$", "");
                        dgvLD[e.ColumnIndex, e.RowIndex].Value = "";
                        dgvLD[e.ColumnIndex, e.RowIndex].Value = KeyWord;
                    }
                    else
                    {
                        // Split letters from digit runs, then join with '$'.
                        // NOTE(review): the pattern's trailing space and the
                        // "(\s+)" alternative look suspicious for plain "A1"
                        // style input — confirm intended behaviour before changing.
                        string KeyWord = dgvLD[e.ColumnIndex, e.RowIndex].Value.ToString();
                        string res = Regex.Replace(KeyWord, @"(\d+)|(\s+) ", " $1 $2 ", RegexOptions.Compiled | RegexOptions.IgnoreCase);
                        KeyWord = "$" + res.Trim().Replace(" ", "$");
                        dgvLD[e.ColumnIndex, e.RowIndex].Value = "";
                        dgvLD[e.ColumnIndex, e.RowIndex].Value = KeyWord;
                    }
                    dgvLD.EndEdit();
                }
            }
            catch { }
        }
        /// <summary>
        /// Loads every journal reference of the current template from the
        /// database into one DataTable, records the consolidation/update cell
        /// markers for each reference, and binds the result to the line-detail
        /// grid. Sets <c>bindCount</c> to the number of database rows so later
        /// handlers can tell stored rows from user-added ones.
        /// </summary>
        private void BindData()
        {
            if (NameValueCollection == null) NameValueCollection = new List<KeyValuePair<int, string>>();
            if (NameValueUpdate == null) NameValueUpdate = new List<KeyValuePair<int, string>>();
            DataTable refdt = ft.GetAllJournalRefOfTemplate();
            DataTable dtfinal = DataConversionTools.ConvertToDataTableStructure<rsTemplateJournal>();
            foreach (DataRow dr in refdt.Rows)
            {
                string refnum = dr["references"].ToString().Trim();
                DataTable dt = ft.GetLineDetailDataFromDB("0", refnum);
                foreach (DataRow dr2 in dt.Rows)
                {
                    DataRow drnew = dtfinal.NewRow();
                    for (int j = 0; j < dt.Columns.Count; j++)
                    {
                        drnew[dt.Columns[j].ColumnName] = dr2[dt.Columns[j].ColumnName];//asign dt2's data to dt
                    }
                    dtfinal.Rows.Add(drnew);
                }
                // Record which cells of this reference carry consolidation /
                // update markers; painted later by dgvLD_DataBindingComplete.
                BindConsolidationDGV(ref dtfinal, ref NameValueCollection, refnum, dtfinal.Rows.Count);
                BindUpdateDGV(ref dtfinal, ref NameValueUpdate, refnum, dtfinal.Rows.Count);
            }
            isFromBindingFlag = true;
            this.dgvLD.DataSource = dtfinal;
            bindCount = dtfinal.Rows.Count;
            this.panel4.AutoSize = true;
            this.panel5.Controls.Add(dgvLD);
            // Disable sorting everywhere; tint the first seven columns ivory.
            for (int i = 0; i < dgvLD.Columns.Count; i++) { dgvLD.Columns[i].SortMode = DataGridViewColumnSortMode.NotSortable; if (i < 7) dgvLD.Columns[i].DefaultCellStyle.BackColor = System.Drawing.Color.Ivory; }
        }
        private int bindCount = 0; // rows loaded from the database at bind time; rows beyond this are user-added
        /// <summary>
        /// "Save" button handler: persists the journal, consolidation and
        /// transaction-update settings, then hides the output task pane.
        /// </summary>
        /// <param name="sender">The button that raised the click.</param>
        /// <param name="e">Click event data (unused).</param>
        public void button1_Click(object sender, EventArgs e)
        {
            try
            {
                Save(sender, null);
                SaveCons(sender, null);
                SaveTransUpd(sender, null);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.ToString(), "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
            // Close the task pane whether or not the save succeeded.
            Ribbon2._MyOutputCustomTaskPane.Visible = false;
        }
        /// <summary>
        /// Persists the journal grid: deletes the template's existing type-0
        /// journal rows, then re-inserts one row per grid line via the
        /// rsTemplateJournal_Ins stored procedure. Rows carrying consolidation
        /// (yellow) or update (aqua) markers are skipped here — they are saved
        /// by SaveCons/SaveTransUpd. When <paramref name="sender"/> is null the
        /// collected entities are also expanded against the worksheet.
        /// </summary>
        /// <param name="sender">Null when invoked programmatically for a worksheet run.</param>
        /// <param name="ws">Worksheet used to expand line-detail entities (may be null).</param>
        public void Save(object sender, Microsoft.Office.Interop.Excel.Worksheet ws)
        {
            List<string> LineIndicatorList = new List<string>();
            List<string> startInCellList = new List<string>();
            SessionInfo.UserInfo.AllowBalTran = "";
            SessionInfo.UserInfo.AllowPostToSuspended = "";
            if (!string.IsNullOrEmpty(SessionInfo.UserInfo.File_ftid))
            {
                SqlConnection conn = null;
                SqlDataReader rdr = null;
                try
                {
                    conn = new
                    SqlConnection(ConfigurationManager.ConnectionStrings["conRsTool"].ConnectionString.ToString());
                    conn.Open();
                    // Clear the template's existing type-0 (post) journal rows.
                    SqlCommand cmd = new SqlCommand("rsTemplateJournal_Del", conn);
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
                    cmd.Parameters.Add(new SqlParameter("@Type", "0"));
                    rdr = cmd.ExecuteReader();
                    rdr.Close();
                    finallist.Clear(); //Initiate data
                    SqlCommand cmd2 = new SqlCommand("rsTemplateJournal_Ins", conn);
                    cmd2.CommandType = CommandType.StoredProcedure;
                    for (int i = 0; i < this.dgvLD.Rows.Count; i++)
                    {
                        // Rows holding consolidation/update markers are handled
                        // by SaveCons/SaveTransUpd, not here.
                        bool iscontinue = false;
                        for (int j = 0; j < dgvLD.Columns.Count; j++)
                        {
                            if (dgvLD.Rows[i].Cells[j].Style.BackColor == Color.Yellow || dgvLD.Rows[i].Cells[j].Style.BackColor == Color.Aqua)
                            {
                                iscontinue = true;
                            }
                        }
                        if (iscontinue)
                            continue;
                        string startCell = string.Empty;
                        string journalN = string.Empty;
                        string journalLN = string.Empty;
                        string abt = string.Empty;
                        string apsa = string.Empty;
                        // Map the grid columns onto a Specialist entity; null
                        // cells become "". Column order is fixed by IniGrd().
                        Specialist re = new Specialist();
                        re.Reference = this.dgvLD.Rows[i].Cells[0].Value == null ? "" : this.dgvLD.Rows[i].Cells[0].Value.ToString().Replace(" ", "");
                        re.SaveReference = this.dgvLD.Rows[i].Cells[1].Value == null ? "" : this.dgvLD.Rows[i].Cells[1].Value.ToString();
                        re.BalanceBy = this.dgvLD.Rows[i].Cells[2].Value == null ? "" : this.dgvLD.Rows[i].Cells[2].Value.ToString();
                        abt = this.dgvLD.Rows[i].Cells[3].Value == null ? "" : this.dgvLD.Rows[i].Cells[3].Value.ToString();
                        apsa = this.dgvLD.Rows[i].Cells[4].Value == null ? "" : this.dgvLD.Rows[i].Cells[4].Value.ToString();
                        re.AllowBalTrans = abt;
                        re.AllowPostSuspAcco = apsa;
                        re.LineIndicator = this.dgvLD.Rows[i].Cells[5].Value == null ? "" : this.dgvLD.Rows[i].Cells[5].Value.ToString();
                        startCell = this.dgvLD.Rows[i].Cells[6].Value == null ? "" : this.dgvLD.Rows[i].Cells[6].Value.ToString();
                        re.StartInCell = startCell;
                        re.populatecellwithJN = this.dgvLD.Rows[i].Cells[7].Value == null ? "" : this.dgvLD.Rows[i].Cells[7].Value.ToString();
                        journalN = this.dgvLD.Rows[i].Cells[8].Value == null ? "" : this.dgvLD.Rows[i].Cells[8].Value.ToString();
                        journalLN = this.dgvLD.Rows[i].Cells[9].Value == null ? "" : this.dgvLD.Rows[i].Cells[9].Value.ToString();
                        re.Ledger = this.dgvLD.Rows[i].Cells[10].Value == null ? "" : this.dgvLD.Rows[i].Cells[10].Value.ToString();
                        re.AccountCode = this.dgvLD.Rows[i].Cells[11].Value == null ? "" : this.dgvLD.Rows[i].Cells[11].Value.ToString();
                        re.AccountingPeriod = this.dgvLD.Rows[i].Cells[12].Value == null ? "" : this.dgvLD.Rows[i].Cells[12].Value.ToString();
                        re.TransactionDate = this.dgvLD.Rows[i].Cells[13].Value == null ? "" : this.dgvLD.Rows[i].Cells[13].Value.ToString();
                        re.DueDate = this.dgvLD.Rows[i].Cells[14].Value == null ? "" : this.dgvLD.Rows[i].Cells[14].Value.ToString();
                        re.JournalType = this.dgvLD.Rows[i].Cells[15].Value == null ? "" : this.dgvLD.Rows[i].Cells[15].Value.ToString();
                        re.JournalSource = this.dgvLD.Rows[i].Cells[16].Value == null ? "" : this.dgvLD.Rows[i].Cells[16].Value.ToString();
                        re.TransactionReference = this.dgvLD.Rows[i].Cells[17].Value == null ? "" : this.dgvLD.Rows[i].Cells[17].Value.ToString();
                        re.Description = this.dgvLD.Rows[i].Cells[18].Value == null ? "" : this.dgvLD.Rows[i].Cells[18].Value.ToString();
                        re.AllocationMarker = this.dgvLD.Rows[i].Cells[19].Value == null ? "" : this.dgvLD.Rows[i].Cells[19].Value.ToString();
                        // Columns 20-29: analysis codes 1-10.
                        re.AnalysisCode1 = this.dgvLD.Rows[i].Cells[20].Value == null ? "" : this.dgvLD.Rows[i].Cells[20].Value.ToString();
                        re.AnalysisCode2 = this.dgvLD.Rows[i].Cells[21].Value == null ? "" : this.dgvLD.Rows[i].Cells[21].Value.ToString();
                        re.AnalysisCode3 = this.dgvLD.Rows[i].Cells[22].Value == null ? "" : this.dgvLD.Rows[i].Cells[22].Value.ToString();
                        re.AnalysisCode4 = this.dgvLD.Rows[i].Cells[23].Value == null ? "" : this.dgvLD.Rows[i].Cells[23].Value.ToString();
                        re.AnalysisCode5 = this.dgvLD.Rows[i].Cells[24].Value == null ? "" : this.dgvLD.Rows[i].Cells[24].Value.ToString();
                        re.AnalysisCode6 = this.dgvLD.Rows[i].Cells[25].Value == null ? "" : this.dgvLD.Rows[i].Cells[25].Value.ToString();
                        re.AnalysisCode7 = this.dgvLD.Rows[i].Cells[26].Value == null ? "" : this.dgvLD.Rows[i].Cells[26].Value.ToString();
                        re.AnalysisCode8 = this.dgvLD.Rows[i].Cells[27].Value == null ? "" : this.dgvLD.Rows[i].Cells[27].Value.ToString();
                        re.AnalysisCode9 = this.dgvLD.Rows[i].Cells[28].Value == null ? "" : this.dgvLD.Rows[i].Cells[28].Value.ToString();
                        re.AnalysisCode10 = this.dgvLD.Rows[i].Cells[29].Value == null ? "" : this.dgvLD.Rows[i].Cells[29].Value.ToString();
                        // Columns 30-54: general descriptions 1-25.
                        re.GenDesc1 = this.dgvLD.Rows[i].Cells[30].Value == null ? "" : this.dgvLD.Rows[i].Cells[30].Value.ToString();
                        re.GenDesc2 = this.dgvLD.Rows[i].Cells[31].Value == null ? "" : this.dgvLD.Rows[i].Cells[31].Value.ToString();
                        re.GenDesc3 = this.dgvLD.Rows[i].Cells[32].Value == null ? "" : this.dgvLD.Rows[i].Cells[32].Value.ToString();
                        re.GenDesc4 = this.dgvLD.Rows[i].Cells[33].Value == null ? "" : this.dgvLD.Rows[i].Cells[33].Value.ToString();
                        re.GenDesc5 = this.dgvLD.Rows[i].Cells[34].Value == null ? "" : this.dgvLD.Rows[i].Cells[34].Value.ToString();
                        re.GenDesc6 = this.dgvLD.Rows[i].Cells[35].Value == null ? "" : this.dgvLD.Rows[i].Cells[35].Value.ToString();
                        re.GenDesc7 = this.dgvLD.Rows[i].Cells[36].Value == null ? "" : this.dgvLD.Rows[i].Cells[36].Value.ToString();
                        re.GenDesc8 = this.dgvLD.Rows[i].Cells[37].Value == null ? "" : this.dgvLD.Rows[i].Cells[37].Value.ToString();
                        re.GenDesc9 = this.dgvLD.Rows[i].Cells[38].Value == null ? "" : this.dgvLD.Rows[i].Cells[38].Value.ToString();
                        re.GenDesc10 = this.dgvLD.Rows[i].Cells[39].Value == null ? "" : this.dgvLD.Rows[i].Cells[39].Value.ToString();
                        re.GenDesc11 = this.dgvLD.Rows[i].Cells[40].Value == null ? "" : this.dgvLD.Rows[i].Cells[40].Value.ToString();
                        re.GenDesc12 = this.dgvLD.Rows[i].Cells[41].Value == null ? "" : this.dgvLD.Rows[i].Cells[41].Value.ToString();
                        re.GenDesc13 = this.dgvLD.Rows[i].Cells[42].Value == null ? "" : this.dgvLD.Rows[i].Cells[42].Value.ToString();
                        re.GenDesc14 = this.dgvLD.Rows[i].Cells[43].Value == null ? "" : this.dgvLD.Rows[i].Cells[43].Value.ToString();
                        re.GenDesc15 = this.dgvLD.Rows[i].Cells[44].Value == null ? "" : this.dgvLD.Rows[i].Cells[44].Value.ToString();
                        re.GenDesc16 = this.dgvLD.Rows[i].Cells[45].Value == null ? "" : this.dgvLD.Rows[i].Cells[45].Value.ToString();
                        re.GenDesc17 = this.dgvLD.Rows[i].Cells[46].Value == null ? "" : this.dgvLD.Rows[i].Cells[46].Value.ToString();
                        re.GenDesc18 = this.dgvLD.Rows[i].Cells[47].Value == null ? "" : this.dgvLD.Rows[i].Cells[47].Value.ToString();
                        re.GenDesc19 = this.dgvLD.Rows[i].Cells[48].Value == null ? "" : this.dgvLD.Rows[i].Cells[48].Value.ToString();
                        re.GenDesc20 = this.dgvLD.Rows[i].Cells[49].Value == null ? "" : this.dgvLD.Rows[i].Cells[49].Value.ToString();
                        re.GenDesc21 = this.dgvLD.Rows[i].Cells[50].Value == null ? "" : this.dgvLD.Rows[i].Cells[50].Value.ToString();
                        re.GenDesc22 = this.dgvLD.Rows[i].Cells[51].Value == null ? "" : this.dgvLD.Rows[i].Cells[51].Value.ToString();
                        re.GenDesc23 = this.dgvLD.Rows[i].Cells[52].Value == null ? "" : this.dgvLD.Rows[i].Cells[52].Value.ToString();
                        re.GenDesc24 = this.dgvLD.Rows[i].Cells[53].Value == null ? "" : this.dgvLD.Rows[i].Cells[53].Value.ToString();
                        re.GenDesc25 = this.dgvLD.Rows[i].Cells[54].Value == null ? "" : this.dgvLD.Rows[i].Cells[54].Value.ToString();
                        // Columns 55-59: amounts and currency.
                        re.TransactionAmount = this.dgvLD.Rows[i].Cells[55].Value == null ? "" : this.dgvLD.Rows[i].Cells[55].Value.ToString();
                        re.CurrencyCode = this.dgvLD.Rows[i].Cells[56].Value == null ? "" : this.dgvLD.Rows[i].Cells[56].Value.ToString();
                        re.DebitCredit = "";
                        re.BaseAmount = this.dgvLD.Rows[i].Cells[57].Value == null ? "" : this.dgvLD.Rows[i].Cells[57].Value.ToString();
                        re.Base2ReportingAmount = this.dgvLD.Rows[i].Cells[58].Value == null ? "" : this.dgvLD.Rows[i].Cells[58].Value.ToString();
                        re.Value4Amount = this.dgvLD.Rows[i].Cells[59].Value == null ? "" : this.dgvLD.Rows[i].Cells[59].Value.ToString();
                        // NOTE(review): relies on Specialist.ToString() being
                        // overridden to report an empty row — confirm.
                        if (string.IsNullOrEmpty(re.ToString())) continue;
                        if (string.IsNullOrEmpty(re.SaveReference))
                            this.dgvLD.Rows[i].Cells[1].ErrorText = "nullable, but unexpected result would happen when save transaction.";
                        else
                            this.dgvLD.Rows[i].Cells[1].ErrorText = string.Empty;
                        if (string.IsNullOrEmpty(re.LineIndicator))
                            this.dgvLD.Rows[i].Cells[5].ErrorText = "Not null.";
                        else
                            this.dgvLD.Rows[i].Cells[5].ErrorText = string.Empty;
                        // Insert this line via the stored procedure (type 0 = post).
                        cmd2.Parameters.Add(new SqlParameter("@Ledger", re.Ledger));
                        cmd2.Parameters.Add(new SqlParameter("@ft_Account", re.AccountCode));
                        cmd2.Parameters.Add(new SqlParameter("@Period", re.AccountingPeriod));
                        cmd2.Parameters.Add(new SqlParameter("@TransDate", re.TransactionDate));
                        cmd2.Parameters.Add(new SqlParameter("@DueDate", re.DueDate));
                        cmd2.Parameters.Add(new SqlParameter("@JrnlType", re.JournalType));
                        cmd2.Parameters.Add(new SqlParameter("@JrnlSource", re.JournalSource));
                        cmd2.Parameters.Add(new SqlParameter("@TransRef", re.TransactionReference));
                        cmd2.Parameters.Add(new SqlParameter("@Description", re.Description));
                        cmd2.Parameters.Add(new SqlParameter("@AlloctnMarker", re.AllocationMarker));
                        cmd2.Parameters.Add(new SqlParameter("@LA1", re.AnalysisCode1));
                        cmd2.Parameters.Add(new SqlParameter("@LA2", re.AnalysisCode2));
                        cmd2.Parameters.Add(new SqlParameter("@LA3", re.AnalysisCode3));
                        cmd2.Parameters.Add(new SqlParameter("@LA4", re.AnalysisCode4));
                        cmd2.Parameters.Add(new SqlParameter("@LA5", re.AnalysisCode5));
                        cmd2.Parameters.Add(new SqlParameter("@LA6", re.AnalysisCode6));
                        cmd2.Parameters.Add(new SqlParameter("@LA7", re.AnalysisCode7));
                        cmd2.Parameters.Add(new SqlParameter("@LA8", re.AnalysisCode8));
                        cmd2.Parameters.Add(new SqlParameter("@LA9", re.AnalysisCode9));
                        cmd2.Parameters.Add(new SqlParameter("@LA10", re.AnalysisCode10));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc1", re.GenDesc1));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc2", re.GenDesc2));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc3", re.GenDesc3));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc4", re.GenDesc4));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc5", re.GenDesc5));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc6", re.GenDesc6));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc7", re.GenDesc7));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc8", re.GenDesc8));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc9", re.GenDesc9));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc10", re.GenDesc10));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc11", re.GenDesc11));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc12", re.GenDesc12));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc13", re.GenDesc13));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc14", re.GenDesc14));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc15", re.GenDesc15));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc16", re.GenDesc16));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc17", re.GenDesc17));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc18", re.GenDesc18));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc19", re.GenDesc19));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc20", re.GenDesc20));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc21", re.GenDesc21));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc22", re.GenDesc22));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc23", re.GenDesc23));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc24", re.GenDesc24));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc25", re.GenDesc25));
                        cmd2.Parameters.Add(new SqlParameter("@TransAmount", re.TransactionAmount));
                        cmd2.Parameters.Add(new SqlParameter("@Currency", re.CurrencyCode));
                        cmd2.Parameters.Add(new SqlParameter("@BaseAmount", re.BaseAmount));
                        cmd2.Parameters.Add(new SqlParameter("@2ndBase", re.Base2ReportingAmount));
                        cmd2.Parameters.Add(new SqlParameter("@4thAmount", re.Value4Amount));
                        cmd2.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
                        cmd2.Parameters.Add(new SqlParameter("@LineIndicator", re.LineIndicator));
                        cmd2.Parameters.Add(new SqlParameter("@StartinginCell", startCell));
                        cmd2.Parameters.Add(new SqlParameter("@BalanceBy", re.BalanceBy));
                        cmd2.Parameters.Add(new SqlParameter("@PopWithJNNumber", re.populatecellwithJN));
                        cmd2.Parameters.Add(new SqlParameter("@Reference", re.Reference));
                        cmd2.Parameters.Add(new SqlParameter("@SaveReference", re.SaveReference));
                        cmd2.Parameters.Add(new SqlParameter("@JournalNumber", journalN));
                        cmd2.Parameters.Add(new SqlParameter("@JournalLineNumber", journalLN));
                        cmd2.Parameters.Add(new SqlParameter("@InputFields", ""));
                        cmd2.Parameters.Add(new SqlParameter("@UpdateFields", ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy1", ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy2", ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy3", ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy4", ""));
                        cmd2.Parameters.Add(new SqlParameter("@Type", "0"));//0.post 1.consolidation 2.update
                        cmd2.Parameters.Add(new SqlParameter("@AllowBalTrans", abt));
                        cmd2.Parameters.Add(new SqlParameter("@AllowPostSuspAcco", apsa));
                        rdr = cmd2.ExecuteReader();
                        rdr.Close();
                        // The command object is reused for every row.
                        cmd2.Parameters.Clear();
                        if (!string.IsNullOrEmpty(re.LineIndicator) && !string.IsNullOrEmpty(startCell))
                        {
                            finallist.Add(re);
                            LineIndicatorList.Add(re.LineIndicator);
                            startInCellList.Add(startCell);
                        }
                    }
                    this.Invalidate();
                    // Programmatic run: expand the saved lines against the sheet.
                    if (sender == null)
                        AddLineDetailEntityListIntoFinalList(finallist, LineIndicatorList, startInCellList, ws, ref finallist);
                }
                catch (Exception ex)
                {
                    throw new Exception(ex.Message + " - Data Error in Journal tab, Output settings !");
                }
                finally
                {
                    if (conn != null)
                    {
                        conn.Close();
                    }
                    if (rdr != null)
                    {
                        rdr.Close();
                    }
                }
            }
        }
/// <summary>
///
/// </summary>
/// <param name="list"></param>
/// <param name="LineIndicatorList"></param>
/// <param name="StartingInCell"></param>
/// <param name="ws"></param>
/// <param name="final"></param>
private void AddLineDetailEntityListIntoFinalListForTransUpd(List<ExcelAddIn4.Common2.Specialist> list, List<string> LineIndicatorList, List<string> StartingInCellList, Microsoft.Office.Interop.Excel.Worksheet ws, ref List<ExcelAddIn4.Common2.Specialist> final)
{
List<ExcelAddIn4.Common2.Specialist> tmplist2 = new List<ExcelAddIn4.Common2.Specialist>();
List<string> usedString = new List<string>();
Predicate<string> pred = EquaWithName;
for (int i = 0; i < LineIndicatorList.Count; i++)
{
tmpstr = StartingInCellList[i] + "," + LineIndicatorList[i];
if (!usedString.Exists(pred))
{
List<ExcelAddIn4.Common2.Specialist> tmplist = ft.GetEntityListFromDGVForTransUpd(StartingInCellList[i], LineIndicatorList[i], list.FindAll((ExcelAddIn4.Common2.Specialist p) => { return p.LineIndicator == LineIndicatorList[i] & p.StartInCell == StartingInCellList[i]; }), ws);//ft.GetEntityListFromDGVForTransUpd(StartingInCellList[i], LineIndicatorList[i], list[i], ws);
if (tmplist != null)
foreach (ExcelAddIn4.Common2.Specialist sp in tmplist)
{
tmplist2.Add(sp);
}
usedString.Add(StartingInCellList[i] + "," + LineIndicatorList[i]);
}
}
final = tmplist2;
}
/// <summary>
///
/// </summary>
/// <param name="elem"></param>
/// <returns></returns>
private bool EquaWithName(string elem)
{
if (elem == tmpstr)
return true;
return false;
}
        // Scratch "<startCell>,<lineIndicator>" key compared by EquaWithName; callers
        // assign it immediately before invoking the predicate (not thread-safe by design).
        string tmpstr = string.Empty;
/// <summary>
///
/// </summary>
/// <param name="list"></param>
/// <param name="LineIndicatorList"></param>
/// <param name="StartingInCell"></param>
/// <param name="ws"></param>
/// <param name="final"></param>
private void AddLineDetailEntityListIntoFinalList(List<Specialist> list, List<string> LineIndicatorList, List<string> StartingInCellList, Microsoft.Office.Interop.Excel.Worksheet ws, ref List<Specialist> final)
{
List<Specialist> tmplist2 = new List<Specialist>();
List<string> usedString = new List<string>();
Predicate<string> pred = EquaWithName;
for (int i = 0; i < LineIndicatorList.Count; i++)
{
tmpstr = StartingInCellList[i] + "," + LineIndicatorList[i];
if (!usedString.Exists(pred))
{
List<Specialist> tmplist = ft.GetEntityListFromDGV(StartingInCellList[i], LineIndicatorList[i], list.FindAll((Specialist p) => { return p.LineIndicator == LineIndicatorList[i] & p.StartInCell == StartingInCellList[i]; }), ws);//ft.GetEntityListFromDGV(StartingInCellList[i], LineIndicatorList[i], list[i], ws);
if (tmplist != null)
foreach (Specialist sp in tmplist)
{
tmplist2.Add(sp);
}
usedString.Add(StartingInCellList[i] + "," + LineIndicatorList[i]);
}
}
final = tmplist2;
}
/// <summary>
///
/// </summary>
/// <param name="list"></param>
/// <param name="LineIndicatorList"></param>
/// <param name="StartingInCell"></param>
/// <param name="ws"></param>
/// <param name="final"></param>
private void AddCreateTextFileEntityListIntoFinalList(List<RowCreateTextFile> list, List<string> LineIndicatorList, List<string> StartingInCellList, Microsoft.Office.Interop.Excel.Worksheet ws, ref List<RowCreateTextFile> final)
{
List<RowCreateTextFile> tmplist2 = new List<RowCreateTextFile>();
List<string> usedString = new List<string>();
Predicate<string> pred = EquaWithName;
for (int i = 0; i < LineIndicatorList.Count; i++)
{
tmpstr = StartingInCellList[i] + "," + LineIndicatorList[i];
if (!usedString.Exists(pred))
{
List<RowCreateTextFile> tmplist = ft.GetEntityListFromDGVForCreateTextFile(StartingInCellList[i], LineIndicatorList[i], list.FindAll((RowCreateTextFile p) => { return p.LineIndicator == LineIndicatorList[i] & p.StartinginCell == StartingInCellList[i]; }), ws);//ft.GetEntityListFromDGVForCreateTextFile(StartingInCellList[i], LineIndicatorList[i], list[i], ws);
if (tmplist != null)
foreach (RowCreateTextFile sp in tmplist)
{
tmplist2.Add(sp);
}
usedString.Add(StartingInCellList[i] + "," + LineIndicatorList[i]);
}
}
final = tmplist2;
}
        /// <summary>
        /// Persists the consolidation definition: scans the line-detail grid for cells
        /// highlighted yellow (these mark the "consolidate by" columns of a row), deletes
        /// any previously saved consolidation rows for the current template, re-inserts
        /// each flagged row via rsTemplateJournal_Ins (@Type = "1"), and — when invoked
        /// programmatically (<paramref name="sender"/> == null) — rebuilds the in-memory
        /// consolidated entity list.
        /// </summary>
        /// <param name="sender">Event source; null means also regenerate the final list.</param>
        /// <param name="ws">Worksheet used when expanding saved rows into entities.</param>
        public void SaveCons(object sender, Microsoft.Office.Interop.Excel.Worksheet ws)
        {
            List<string> LineIndicatorList = new List<string>();
            List<string> startInCellList = new List<string>();
            List<string> consStr = new List<string>();
            string str = string.Empty;
            List<int> ConsolidationRowNumber = new List<int>();
            int consCurrentCount = 0;
            // Pass 1: collect, per grid row, the comma-joined names of columns whose cell
            // background is yellow (the user's "consolidate by" selection). A row index is
            // appended to ConsolidationRowNumber once per yellow cell, so it may contain
            // duplicates; only Contains() is used on it below, so that is harmless.
            for (int i = 0; i < dgvLD.Rows.Count; i++)
            {
                for (int j = 0; j < dgvLD.Columns.Count; j++)
                {
                    if (dgvLD.Rows[i].Cells[j].Style.BackColor == Color.Yellow)
                    {
                        str += dgvLD.Columns[j].Name + ",";
                        ConsolidationRowNumber.Add(i);
                    }
                }
                if (!string.IsNullOrEmpty(str))
                {
                    consStr.Add(str);
                    str = "";
                }
            }
            // A single row means only the blank template row is present — nothing to save.
            if (dgvLD.RowCount == 1)
            {
                return;
            }
            if (!string.IsNullOrEmpty(SessionInfo.UserInfo.File_ftid))
            {
                SqlConnection conn = null;
                SqlDataReader rdr = null;
                try
                {
                    // Connection string is read from app.config ("conRsTool").
                    conn = new
                    SqlConnection(ConfigurationManager.ConnectionStrings["conRsTool"].ConnectionString.ToString());
                    conn.Open();
                    // Remove any previously stored consolidation rows (@Type "1") for this template.
                    SqlCommand cmd = new SqlCommand("rsTemplateJournal_Del", conn);
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
                    cmd.Parameters.Add(new SqlParameter("@Type", "1"));
                    rdr = cmd.ExecuteReader();
                    rdr.Close();
                    // Rows saved in this pass; used to rebuild the in-memory list below.
                    List<Specialist> ll = new List<Specialist>();
                    SqlCommand cmd2 = new SqlCommand("rsTemplateJournal_Ins", conn);
                    cmd2.CommandType = CommandType.StoredProcedure;
                    for (int i = 0; i < this.dgvLD.Rows.Count; i++)
                    {
                        // Only rows that had at least one yellow cell take part in consolidation.
                        if (!ConsolidationRowNumber.Contains(i))
                        {
                            continue;
                        }
                        string startCell = string.Empty;
                        string journalN = string.Empty;
                        string journalLN = string.Empty;
                        string abt = string.Empty;
                        string apsa = string.Empty;
                        // Map grid cells (fixed column positions) onto a Specialist entity.
                        Specialist re = new Specialist();
                        re.Reference = this.dgvLD.Rows[i].Cells[0].Value == null ? "" : this.dgvLD.Rows[i].Cells[0].Value.ToString().Replace(" ", "");
                        re.SaveReference = this.dgvLD.Rows[i].Cells[1].Value == null ? "" : this.dgvLD.Rows[i].Cells[1].Value.ToString();
                        re.BalanceBy = this.dgvLD.Rows[i].Cells[2].Value == null ? "" : this.dgvLD.Rows[i].Cells[2].Value.ToString();
                        abt = this.dgvLD.Rows[i].Cells[3].Value == null ? "" : this.dgvLD.Rows[i].Cells[3].Value.ToString();
                        apsa = this.dgvLD.Rows[i].Cells[4].Value == null ? "" : this.dgvLD.Rows[i].Cells[4].Value.ToString();
                        re.AllowBalTrans = abt;
                        re.AllowPostSuspAcco = apsa;
                        re.LineIndicator = this.dgvLD.Rows[i].Cells[5].Value == null ? "" : this.dgvLD.Rows[i].Cells[5].Value.ToString();
                        startCell = this.dgvLD.Rows[i].Cells[6].Value == null ? "" : this.dgvLD.Rows[i].Cells[6].Value.ToString();
                        re.populatecellwithJN = this.dgvLD.Rows[i].Cells[7].Value == null ? "" : this.dgvLD.Rows[i].Cells[7].Value.ToString();
                        journalN = this.dgvLD.Rows[i].Cells[8].Value == null ? "" : this.dgvLD.Rows[i].Cells[8].Value.ToString();
                        journalLN = this.dgvLD.Rows[i].Cells[9].Value == null ? "" : this.dgvLD.Rows[i].Cells[9].Value.ToString();
                        re.Ledger = this.dgvLD.Rows[i].Cells[10].Value == null ? "" : this.dgvLD.Rows[i].Cells[10].Value.ToString();
                        re.AccountCode = this.dgvLD.Rows[i].Cells[11].Value == null ? "" : this.dgvLD.Rows[i].Cells[11].Value.ToString();
                        re.AccountingPeriod = this.dgvLD.Rows[i].Cells[12].Value == null ? "" : this.dgvLD.Rows[i].Cells[12].Value.ToString();
                        re.TransactionDate = this.dgvLD.Rows[i].Cells[13].Value == null ? "" : this.dgvLD.Rows[i].Cells[13].Value.ToString();
                        re.DueDate = this.dgvLD.Rows[i].Cells[14].Value == null ? "" : this.dgvLD.Rows[i].Cells[14].Value.ToString();
                        re.JournalType = this.dgvLD.Rows[i].Cells[15].Value == null ? "" : this.dgvLD.Rows[i].Cells[15].Value.ToString();
                        re.JournalSource = this.dgvLD.Rows[i].Cells[16].Value == null ? "" : this.dgvLD.Rows[i].Cells[16].Value.ToString();
                        re.TransactionReference = this.dgvLD.Rows[i].Cells[17].Value == null ? "" : this.dgvLD.Rows[i].Cells[17].Value.ToString();
                        re.Description = this.dgvLD.Rows[i].Cells[18].Value == null ? "" : this.dgvLD.Rows[i].Cells[18].Value.ToString();
                        re.AllocationMarker = this.dgvLD.Rows[i].Cells[19].Value == null ? "" : this.dgvLD.Rows[i].Cells[19].Value.ToString();
                        // Analysis codes 1-10 occupy cells 20-29.
                        re.AnalysisCode1 = this.dgvLD.Rows[i].Cells[20].Value == null ? "" : this.dgvLD.Rows[i].Cells[20].Value.ToString();
                        re.AnalysisCode2 = this.dgvLD.Rows[i].Cells[21].Value == null ? "" : this.dgvLD.Rows[i].Cells[21].Value.ToString();
                        re.AnalysisCode3 = this.dgvLD.Rows[i].Cells[22].Value == null ? "" : this.dgvLD.Rows[i].Cells[22].Value.ToString();
                        re.AnalysisCode4 = this.dgvLD.Rows[i].Cells[23].Value == null ? "" : this.dgvLD.Rows[i].Cells[23].Value.ToString();
                        re.AnalysisCode5 = this.dgvLD.Rows[i].Cells[24].Value == null ? "" : this.dgvLD.Rows[i].Cells[24].Value.ToString();
                        re.AnalysisCode6 = this.dgvLD.Rows[i].Cells[25].Value == null ? "" : this.dgvLD.Rows[i].Cells[25].Value.ToString();
                        re.AnalysisCode7 = this.dgvLD.Rows[i].Cells[26].Value == null ? "" : this.dgvLD.Rows[i].Cells[26].Value.ToString();
                        re.AnalysisCode8 = this.dgvLD.Rows[i].Cells[27].Value == null ? "" : this.dgvLD.Rows[i].Cells[27].Value.ToString();
                        re.AnalysisCode9 = this.dgvLD.Rows[i].Cells[28].Value == null ? "" : this.dgvLD.Rows[i].Cells[28].Value.ToString();
                        re.AnalysisCode10 = this.dgvLD.Rows[i].Cells[29].Value == null ? "" : this.dgvLD.Rows[i].Cells[29].Value.ToString();
                        // General descriptions 1-25 occupy cells 30-54.
                        re.GenDesc1 = this.dgvLD.Rows[i].Cells[30].Value == null ? "" : this.dgvLD.Rows[i].Cells[30].Value.ToString();
                        re.GenDesc2 = this.dgvLD.Rows[i].Cells[31].Value == null ? "" : this.dgvLD.Rows[i].Cells[31].Value.ToString();
                        re.GenDesc3 = this.dgvLD.Rows[i].Cells[32].Value == null ? "" : this.dgvLD.Rows[i].Cells[32].Value.ToString();
                        re.GenDesc4 = this.dgvLD.Rows[i].Cells[33].Value == null ? "" : this.dgvLD.Rows[i].Cells[33].Value.ToString();
                        re.GenDesc5 = this.dgvLD.Rows[i].Cells[34].Value == null ? "" : this.dgvLD.Rows[i].Cells[34].Value.ToString();
                        re.GenDesc6 = this.dgvLD.Rows[i].Cells[35].Value == null ? "" : this.dgvLD.Rows[i].Cells[35].Value.ToString();
                        re.GenDesc7 = this.dgvLD.Rows[i].Cells[36].Value == null ? "" : this.dgvLD.Rows[i].Cells[36].Value.ToString();
                        re.GenDesc8 = this.dgvLD.Rows[i].Cells[37].Value == null ? "" : this.dgvLD.Rows[i].Cells[37].Value.ToString();
                        re.GenDesc9 = this.dgvLD.Rows[i].Cells[38].Value == null ? "" : this.dgvLD.Rows[i].Cells[38].Value.ToString();
                        re.GenDesc10 = this.dgvLD.Rows[i].Cells[39].Value == null ? "" : this.dgvLD.Rows[i].Cells[39].Value.ToString();
                        re.GenDesc11 = this.dgvLD.Rows[i].Cells[40].Value == null ? "" : this.dgvLD.Rows[i].Cells[40].Value.ToString();
                        re.GenDesc12 = this.dgvLD.Rows[i].Cells[41].Value == null ? "" : this.dgvLD.Rows[i].Cells[41].Value.ToString();
                        re.GenDesc13 = this.dgvLD.Rows[i].Cells[42].Value == null ? "" : this.dgvLD.Rows[i].Cells[42].Value.ToString();
                        re.GenDesc14 = this.dgvLD.Rows[i].Cells[43].Value == null ? "" : this.dgvLD.Rows[i].Cells[43].Value.ToString();
                        re.GenDesc15 = this.dgvLD.Rows[i].Cells[44].Value == null ? "" : this.dgvLD.Rows[i].Cells[44].Value.ToString();
                        re.GenDesc16 = this.dgvLD.Rows[i].Cells[45].Value == null ? "" : this.dgvLD.Rows[i].Cells[45].Value.ToString();
                        re.GenDesc17 = this.dgvLD.Rows[i].Cells[46].Value == null ? "" : this.dgvLD.Rows[i].Cells[46].Value.ToString();
                        re.GenDesc18 = this.dgvLD.Rows[i].Cells[47].Value == null ? "" : this.dgvLD.Rows[i].Cells[47].Value.ToString();
                        re.GenDesc19 = this.dgvLD.Rows[i].Cells[48].Value == null ? "" : this.dgvLD.Rows[i].Cells[48].Value.ToString();
                        re.GenDesc20 = this.dgvLD.Rows[i].Cells[49].Value == null ? "" : this.dgvLD.Rows[i].Cells[49].Value.ToString();
                        re.GenDesc21 = this.dgvLD.Rows[i].Cells[50].Value == null ? "" : this.dgvLD.Rows[i].Cells[50].Value.ToString();
                        re.GenDesc22 = this.dgvLD.Rows[i].Cells[51].Value == null ? "" : this.dgvLD.Rows[i].Cells[51].Value.ToString();
                        re.GenDesc23 = this.dgvLD.Rows[i].Cells[52].Value == null ? "" : this.dgvLD.Rows[i].Cells[52].Value.ToString();
                        re.GenDesc24 = this.dgvLD.Rows[i].Cells[53].Value == null ? "" : this.dgvLD.Rows[i].Cells[53].Value.ToString();
                        re.GenDesc25 = this.dgvLD.Rows[i].Cells[54].Value == null ? "" : this.dgvLD.Rows[i].Cells[54].Value.ToString();
                        re.TransactionAmount = this.dgvLD.Rows[i].Cells[55].Value == null ? "" : this.dgvLD.Rows[i].Cells[55].Value.ToString();
                        re.CurrencyCode = this.dgvLD.Rows[i].Cells[56].Value == null ? "" : this.dgvLD.Rows[i].Cells[56].Value.ToString();
                        re.DebitCredit = "";
                        re.BaseAmount = this.dgvLD.Rows[i].Cells[57].Value == null ? "" : this.dgvLD.Rows[i].Cells[57].Value.ToString();
                        re.Base2ReportingAmount = this.dgvLD.Rows[i].Cells[58].Value == null ? "" : this.dgvLD.Rows[i].Cells[58].Value.ToString();
                        re.Value4Amount = this.dgvLD.Rows[i].Cells[59].Value == null ? "" : this.dgvLD.Rows[i].Cells[59].Value.ToString();
                        // Skip fully-empty rows. NOTE(review): assumes Specialist.ToString()
                        // concatenates its fields and is "" for an empty row — confirm; and a
                        // skipped row does NOT advance consCurrentCount, which could desync
                        // consStr indexing if an empty row carries yellow cells.
                        if (string.IsNullOrEmpty(re.ToString())) continue;
                        // Surface soft validation hints on the grid (non-blocking).
                        if (string.IsNullOrEmpty(re.SaveReference))
                            this.dgvLD.Rows[i].Cells[1].ErrorText = "nullable, but unexpected result would happen when save transaction.";
                        else
                            this.dgvLD.Rows[i].Cells[1].ErrorText = string.Empty;
                        if (string.IsNullOrEmpty(re.LineIndicator))
                            this.dgvLD.Rows[i].Cells[5].ErrorText = "Not null.";
                        else
                            this.dgvLD.Rows[i].Cells[5].ErrorText = string.Empty;
                        // Parameterized insert of the consolidation row.
                        cmd2.Parameters.Add(new SqlParameter("@Ledger", re.Ledger));
                        cmd2.Parameters.Add(new SqlParameter("@ft_Account", re.AccountCode));
                        cmd2.Parameters.Add(new SqlParameter("@Period", re.AccountingPeriod));
                        cmd2.Parameters.Add(new SqlParameter("@TransDate", re.TransactionDate));
                        cmd2.Parameters.Add(new SqlParameter("@DueDate", re.DueDate));
                        cmd2.Parameters.Add(new SqlParameter("@JrnlType", re.JournalType));
                        cmd2.Parameters.Add(new SqlParameter("@JrnlSource", re.JournalSource));
                        cmd2.Parameters.Add(new SqlParameter("@TransRef", re.TransactionReference));
                        cmd2.Parameters.Add(new SqlParameter("@Description", re.Description));
                        cmd2.Parameters.Add(new SqlParameter("@AlloctnMarker", re.AllocationMarker));
                        cmd2.Parameters.Add(new SqlParameter("@LA1", re.AnalysisCode1));
                        cmd2.Parameters.Add(new SqlParameter("@LA2", re.AnalysisCode2));
                        cmd2.Parameters.Add(new SqlParameter("@LA3", re.AnalysisCode3));
                        cmd2.Parameters.Add(new SqlParameter("@LA4", re.AnalysisCode4));
                        cmd2.Parameters.Add(new SqlParameter("@LA5", re.AnalysisCode5));
                        cmd2.Parameters.Add(new SqlParameter("@LA6", re.AnalysisCode6));
                        cmd2.Parameters.Add(new SqlParameter("@LA7", re.AnalysisCode7));
                        cmd2.Parameters.Add(new SqlParameter("@LA8", re.AnalysisCode8));
                        cmd2.Parameters.Add(new SqlParameter("@LA9", re.AnalysisCode9));
                        cmd2.Parameters.Add(new SqlParameter("@LA10", re.AnalysisCode10));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc1", re.GenDesc1));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc2", re.GenDesc2));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc3", re.GenDesc3));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc4", re.GenDesc4));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc5", re.GenDesc5));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc6", re.GenDesc6));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc7", re.GenDesc7));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc8", re.GenDesc8));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc9", re.GenDesc9));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc10", re.GenDesc10));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc11", re.GenDesc11));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc12", re.GenDesc12));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc13", re.GenDesc13));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc14", re.GenDesc14));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc15", re.GenDesc15));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc16", re.GenDesc16));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc17", re.GenDesc17));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc18", re.GenDesc18));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc19", re.GenDesc19));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc20", re.GenDesc20));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc21", re.GenDesc21));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc22", re.GenDesc22));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc23", re.GenDesc23));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc24", re.GenDesc24));
                        cmd2.Parameters.Add(new SqlParameter("@GenDesc25", re.GenDesc25));
                        cmd2.Parameters.Add(new SqlParameter("@TransAmount", re.TransactionAmount));
                        cmd2.Parameters.Add(new SqlParameter("@Currency", re.CurrencyCode));
                        cmd2.Parameters.Add(new SqlParameter("@BaseAmount", re.BaseAmount));
                        cmd2.Parameters.Add(new SqlParameter("@2ndBase", re.Base2ReportingAmount));
                        cmd2.Parameters.Add(new SqlParameter("@4thAmount", re.Value4Amount));
                        cmd2.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
                        cmd2.Parameters.Add(new SqlParameter("@LineIndicator", re.LineIndicator));
                        cmd2.Parameters.Add(new SqlParameter("@StartinginCell", startCell));
                        cmd2.Parameters.Add(new SqlParameter("@BalanceBy", re.BalanceBy));
                        cmd2.Parameters.Add(new SqlParameter("@PopWithJNNumber", re.populatecellwithJN));
                        cmd2.Parameters.Add(new SqlParameter("@Reference", re.Reference));
                        cmd2.Parameters.Add(new SqlParameter("@SaveReference", re.SaveReference));
                        cmd2.Parameters.Add(new SqlParameter("@JournalNumber", journalN));
                        cmd2.Parameters.Add(new SqlParameter("@JournalLineNumber", journalLN));
                        cmd2.Parameters.Add(new SqlParameter("@InputFields", ""));
                        cmd2.Parameters.Add(new SqlParameter("@UpdateFields", ""));
                        // Up to four consolidation columns, parsed from the comma-joined
                        // names collected in pass 1 for this row.
                        string[] arr = consStr[consCurrentCount].Split(',');
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy1", (arr.Count() > 0) ? arr[0] : ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy2", (arr.Count() > 1) ? arr[1] : ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy3", (arr.Count() > 2) ? arr[2] : ""));
                        cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy4", (arr.Count() > 3) ? arr[3] : ""));
                        cmd2.Parameters.Add(new SqlParameter("@Type", "1"));//0.post 1.consolidation 2.update
                        cmd2.Parameters.Add(new SqlParameter("@AllowBalTrans", abt));
                        cmd2.Parameters.Add(new SqlParameter("@AllowPostSuspAcco", apsa));
                        rdr = cmd2.ExecuteReader();
                        rdr.Close();
                        // Same SqlCommand instance is reused, so parameters must be cleared each pass.
                        cmd2.Parameters.Clear();
                        if (!string.IsNullOrEmpty(re.LineIndicator) && !string.IsNullOrEmpty(startCell))
                        {
                            ll.Add(re);
                            LineIndicatorList.Add(re.LineIndicator);
                            startInCellList.Add(startCell);
                        }
                        consCurrentCount++;
                    }
                    this.Invalidate();
                    // When called programmatically, also rebuild the consolidated entity list
                    // from the rows that were just persisted.
                    if (sender == null)
                    {
                        for (int ii = 0; ii < ll.Count; ii++)
                        {
                            string[] arr = consStr[ii].Split(',');
                            AddConsEntityListIntoFinalList(ll[ii], LineIndicatorList[ii], startInCellList[ii], (arr.Count() > 0) ? arr[0] : "", (arr.Count() > 1) ? arr[1] : "", (arr.Count() > 2) ? arr[2] : "", (arr.Count() > 3) ? arr[3] : "", ws);
                        }
                    }
                }
                catch (Exception ex)
                {
                    LogHelper.WriteLog(typeof(OutputContainer), ex.Message + " - Data Error in Journal tab, Output settings !");
                    throw new Exception(ex.Message + " - Data Error in Journal tab, Output settings !");
                }
                finally
                {
                    if (conn != null)
                    {
                        conn.Close();
                    }
                    if (rdr != null)
                    {
                        rdr.Close();
                    }
                }
            }
        }
/// <summary>
///
/// </summary>
/// <param name="value"></param>
/// <returns></returns>
public static bool IsNumeric(string value)
{
return Regex.IsMatch(value, @"^[+-]?\d*[.]?\d*$");
}
/// <summary>
///
/// </summary>
/// <param name="value"></param>
/// <returns></returns>
public static bool IsInt(string value)
{
return Regex.IsMatch(value, @"^[+-]?\d*$");
}
/// <summary>
///
/// </summary>
/// <param name="value"></param>
/// <returns></returns>
public static bool IsUnsign(string value)
{
return Regex.IsMatch(value, @"^\d*[.]?\d*$");
}
        /// <summary>
        /// Expands one saved consolidation row into entities, groups them by up to four
        /// "consolidate by" column values, sums the amount columns within each group, and
        /// appends one consolidated Specialist per group to <c>finallist</c>.
        /// Does nothing when no group-by key is supplied.
        /// </summary>
        /// <param name="list">The saved consolidation row to expand.</param>
        /// <param name="LineIndicator">Line indicator of the row.</param>
        /// <param name="StartingInCell">Starting cell of the row.</param>
        /// <param name="groupbyKey1">First group-by column name; empty to skip.</param>
        /// <param name="groupbyKey2">Second group-by column name; empty to skip.</param>
        /// <param name="groupbyKey3">Third group-by column name; empty to skip.</param>
        /// <param name="groupbyKey4">Fourth group-by column name; empty to skip.</param>
        /// <param name="ws">Worksheet the cell references are resolved against.</param>
        private void AddConsEntityListIntoFinalList(Specialist list, string LineIndicator, string StartingInCell, string groupbyKey1, string groupbyKey2, string groupbyKey3, string groupbyKey4, Microsoft.Office.Interop.Excel.Worksheet ws)
        {
            if (string.IsNullOrEmpty(groupbyKey1) && string.IsNullOrEmpty(groupbyKey2) && string.IsNullOrEmpty(groupbyKey3) && string.IsNullOrEmpty(groupbyKey4)) return;
            List<Specialist> tmplist2 = new List<Specialist>();
            List<Specialist> input = new List<Specialist>();
            input.Add(list);
            // Expand the single saved row into its concrete worksheet-backed entities.
            List<Specialist> newlist = ft.GetEntityListFromDGV(StartingInCell, LineIndicator, input, ws);
            if (newlist != null)
                foreach (Specialist sp in newlist)
                {
                    tmplist2.Add(sp);
                }
            DataTable dt = ft.ToDataTable(tmplist2);
            // Extra column with an empty caption; DataTable auto-names it "Column1".
            // It serves as a constant-valued fallback key so unset group-by slots do not
            // affect the grouping. NOTE(review): assumes no real column is named "Column1".
            dt.Columns.Add("");
            // Group by the chosen columns; only groupbyKey4 is trimmed — presumably a
            // historical inconsistency, confirm before aligning the others.
            var query = from t in dt.AsEnumerable()
                        group t by new { t1 = t.Field<string>(string.IsNullOrEmpty(groupbyKey1) ? "Column1" : groupbyKey1), t2 = t.Field<string>(string.IsNullOrEmpty(groupbyKey2) ? "Column1" : groupbyKey2), t3 = t.Field<string>(string.IsNullOrEmpty(groupbyKey3) ? "Column1" : groupbyKey3), t4 = t.Field<string>(string.IsNullOrEmpty(groupbyKey4.Trim()) ? "Column1" : groupbyKey4) } into m
                        select new
                        {
                            // Non-amount columns take the first row's value for the group;
                            // the four amount columns are summed. NOTE(review): Decimal.Parse
                            // throws on empty/non-numeric amounts — confirm upstream validation.
                            Reference = m.First().Field<string>("Reference"),
                            SaveReference = m.First().Field<string>("SaveReference"),
                            populatecellwithJN = m.First().Field<string>("populatecellwithJN"),
                            BalanceBy = m.First().Field<string>("BalanceBy"),
                            Ledger = m.First().Field<string>("Ledger"),
                            AccountCode = m.First().Field<string>("AccountCode"),
                            AccountingPeriod = m.First().Field<string>("AccountingPeriod"),
                            AllocationMarker = m.First().Field<string>("AllocationMarker"),
                            AnalysisCode1 = m.First().Field<string>("AnalysisCode1"),
                            AnalysisCode10 = m.First().Field<string>("AnalysisCode10"),
                            AnalysisCode2 = m.First().Field<string>("AnalysisCode2"),
                            AnalysisCode3 = m.First().Field<string>("AnalysisCode3"),
                            AnalysisCode4 = m.First().Field<string>("AnalysisCode4"),
                            AnalysisCode5 = m.First().Field<string>("AnalysisCode5"),
                            AnalysisCode6 = m.First().Field<string>("AnalysisCode6"),
                            AnalysisCode7 = m.First().Field<string>("AnalysisCode7"),
                            AnalysisCode8 = m.First().Field<string>("AnalysisCode8"),
                            AnalysisCode9 = m.First().Field<string>("AnalysisCode9"),
                            Base2ReportingAmount = m.Sum(k => Decimal.Parse(k.Field<string>("Base2ReportingAmount"))),
                            BaseAmount = m.Sum(k => Decimal.Parse(k.Field<string>("BaseAmount"))),
                            CurrencyCode = m.First().Field<string>("CurrencyCode"),
                            DebitCredit = m.First().Field<string>("DebitCredit"),
                            Description = m.First().Field<string>("Description"),
                            JournalSource = m.First().Field<string>("JournalSource"),
                            JournalType = m.First().Field<string>("JournalType"),
                            TransactionAmount = m.Sum(k => Decimal.Parse(k.Field<string>("TransactionAmount"))),
                            TransactionDate = m.First().Field<string>("TransactionDate"),
                            DueDate = m.First().Field<string>("DueDate"),
                            TransactionReference = m.First().Field<string>("TransactionReference"),
                            Value4Amount = m.Sum(k => Decimal.Parse(k.Field<string>("Value4Amount"))),
                            GeneralDescription1 = m.First().Field<string>("GenDesc1"),
                            GeneralDescription2 = m.First().Field<string>("GenDesc2"),
                            GeneralDescription3 = m.First().Field<string>("GenDesc3"),
                            GeneralDescription4 = m.First().Field<string>("GenDesc4"),
                            GeneralDescription5 = m.First().Field<string>("GenDesc5"),
                            GeneralDescription6 = m.First().Field<string>("GenDesc6"),
                            GeneralDescription7 = m.First().Field<string>("GenDesc7"),
                            GeneralDescription8 = m.First().Field<string>("GenDesc8"),
                            GeneralDescription9 = m.First().Field<string>("GenDesc9"),
                            GeneralDescription10 = m.First().Field<string>("GenDesc10"),
                            GeneralDescription11 = m.First().Field<string>("GenDesc11"),
                            GeneralDescription12 = m.First().Field<string>("GenDesc12"),
                            GeneralDescription13 = m.First().Field<string>("GenDesc13"),
                            GeneralDescription14 = m.First().Field<string>("GenDesc14"),
                            GeneralDescription15 = m.First().Field<string>("GenDesc15"),
                            GeneralDescription16 = m.First().Field<string>("GenDesc16"),
                            GeneralDescription17 = m.First().Field<string>("GenDesc17"),
                            GeneralDescription18 = m.First().Field<string>("GenDesc18"),
                            GeneralDescription19 = m.First().Field<string>("GenDesc19"),
                            GeneralDescription20 = m.First().Field<string>("GenDesc20"),
                            GeneralDescription21 = m.First().Field<string>("GenDesc21"),
                            GeneralDescription22 = m.First().Field<string>("GenDesc22"),
                            GeneralDescription23 = m.First().Field<string>("GenDesc23"),
                            GeneralDescription24 = m.First().Field<string>("GenDesc24"),
                            GeneralDescription25 = m.First().Field<string>("GenDesc25"),
                            rowcount = m.Count(),
                        };
            // Materialize one consolidated Specialist per group and append it to finallist.
            foreach (var employee in query)
            {
                Specialist re = new Specialist();
                re.Reference = employee.Reference;
                re.SaveReference = employee.SaveReference;
                re.populatecellwithJN = employee.populatecellwithJN;
                re.BalanceBy = employee.BalanceBy;
                re.Ledger = employee.Ledger;
                re.AccountCode = employee.AccountCode;
                re.AccountingPeriod = employee.AccountingPeriod;
                re.TransactionDate = employee.TransactionDate;
                re.DueDate = employee.DueDate;
                re.JournalType = employee.JournalType;
                re.JournalSource = employee.JournalSource;
                re.TransactionReference = employee.TransactionReference;
                re.Description = employee.Description;
                re.AllocationMarker = employee.AllocationMarker;
                re.AnalysisCode1 = employee.AnalysisCode1;
                re.AnalysisCode2 = employee.AnalysisCode2;
                re.AnalysisCode3 = employee.AnalysisCode3;
                re.AnalysisCode4 = employee.AnalysisCode4;
                re.AnalysisCode5 = employee.AnalysisCode5;
                re.AnalysisCode6 = employee.AnalysisCode6;
                re.AnalysisCode7 = employee.AnalysisCode7;
                re.AnalysisCode8 = employee.AnalysisCode8;
                re.AnalysisCode9 = employee.AnalysisCode9;
                re.AnalysisCode10 = employee.AnalysisCode10;
                re.GenDesc1 = employee.GeneralDescription1;
                re.GenDesc2 = employee.GeneralDescription2;
                re.GenDesc3 = employee.GeneralDescription3;
                re.GenDesc4 = employee.GeneralDescription4;
                re.GenDesc5 = employee.GeneralDescription5;
                re.GenDesc6 = employee.GeneralDescription6;
                re.GenDesc7 = employee.GeneralDescription7;
                re.GenDesc8 = employee.GeneralDescription8;
                re.GenDesc9 = employee.GeneralDescription9;
                re.GenDesc10 = employee.GeneralDescription10;
                re.GenDesc11 = employee.GeneralDescription11;
                re.GenDesc12 = employee.GeneralDescription12;
                re.GenDesc13 = employee.GeneralDescription13;
                re.GenDesc14 = employee.GeneralDescription14;
                re.GenDesc15 = employee.GeneralDescription15;
                re.GenDesc16 = employee.GeneralDescription16;
                re.GenDesc17 = employee.GeneralDescription17;
                re.GenDesc18 = employee.GeneralDescription18;
                re.GenDesc19 = employee.GeneralDescription19;
                re.GenDesc20 = employee.GeneralDescription20;
                re.GenDesc21 = employee.GeneralDescription21;
                re.GenDesc22 = employee.GeneralDescription22;
                re.GenDesc23 = employee.GeneralDescription23;
                re.GenDesc24 = employee.GeneralDescription24;
                re.GenDesc25 = employee.GeneralDescription25;
                re.TransactionAmount = employee.TransactionAmount.ToString();
                re.CurrencyCode = employee.CurrencyCode;
                // Consolidated rows are hard-coded to "C" — presumably credit; confirm.
                re.DebitCredit = "C";
                re.BaseAmount = employee.BaseAmount.ToString();
                re.Base2ReportingAmount = employee.Base2ReportingAmount.ToString();
                re.Value4Amount = employee.Value4Amount.ToString();
                finallist.Add(re);
            }
        }
/// <summary>
///
/// </summary>
/// <param name="dt"></param>
/// <param name="NameValueCollection"></param>
/// <param name="refnum"></param>
/// <param name="dtcount"></param>
private void BindConsolidationDGV(ref DataTable dt, ref List<KeyValuePair<int, string>> NameValueCollection, string refnum, int dtcount)
{
try
{
DataTable dt2 = ft.GetLineDetailDataFromDB("1", refnum);
for (int i = 0; i < dt2.Rows.Count; i++)
{
if (dt.Rows.Count == 0)
{
dt = DataConversionTools.ConvertToDataTableStructure<rsTemplateJournal>();
}
DataRow dr = dt.NewRow();
for (int j = 0; j < dt.Columns.Count; j++)
{
dr[dt.Columns[j].ColumnName] = dt2.Rows[i][dt.Columns[j].ColumnName];
}
dt.Rows.Add(dr);
if (!string.IsNullOrEmpty(dt2.Rows[i]["ConsolidateBy1"].ToString()))
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, dt2.Rows[i]["ConsolidateBy1"].ToString()));
else
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, ""));
if (!string.IsNullOrEmpty(dt2.Rows[i]["ConsolidateBy2"].ToString()))
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, dt2.Rows[i]["ConsolidateBy2"].ToString()));
else
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, ""));
if (!string.IsNullOrEmpty(dt2.Rows[i]["ConsolidateBy3"].ToString()))
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, dt2.Rows[i]["ConsolidateBy3"].ToString()));
else
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, ""));
if (!string.IsNullOrEmpty(dt2.Rows[i]["ConsolidateBy4"].ToString()))
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, dt2.Rows[i]["ConsolidateBy4"].ToString()));
else
NameValueCollection.Add(new KeyValuePair<int, string>(dtcount + i, ""));
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, "Output settings error");
LogHelper.WriteLog(typeof(OutputContainer), ex.Message + "Output settings error");
}
}
/// <summary>
///
/// </summary>
/// <param name="dt"></param>
/// <param name="NameValueUpdate"></param>
/// <param name="refnum"></param>
/// <param name="dtcount"></param>
private void BindUpdateDGV(ref DataTable dt, ref List<KeyValuePair<int, string>> NameValueUpdate, string refnum, int dtcount)
{
try
{
DataTable dt2 = ft.GetLineDetailDataFromDB("2", refnum);
for (int i = 0; i < dt2.Rows.Count; i++)
{
if (dt.Rows.Count == 0)
{
dt = DataConversionTools.ConvertToDataTableStructure<rsTemplateJournal>();
}
DataRow dr = dt.NewRow();
for (int j = 0; j < dt.Columns.Count; j++)
{
dr[dt.Columns[j].ColumnName] = dt2.Rows[i][dt.Columns[j].ColumnName];
}
dt.Rows.Add(dr);
string updateStr = dt2.Rows[i]["updateFields"].ToString();
string[] sUpdateArray = Regex.Split(updateStr, ",", RegexOptions.IgnoreCase);
if (!string.IsNullOrEmpty(updateStr))
foreach (string s in sUpdateArray)
{
NameValueUpdate.Add(new KeyValuePair<int, string>(dtcount + i, s));
}
}
}
catch (Exception ex)
{
MessageBox.Show(ex.Message, "Output settings error");
LogHelper.WriteLog(typeof(OutputContainer), ex.Message + "Output settings error");
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void button3_Click(object sender, EventArgs e)
{
DataFieldsSetting dfs = new DataFieldsSetting("Gen");
dfs.ShowDialog();
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
public void btnSaveCriteria_Click(object sender, EventArgs e)
{
SqlConnection conn = null;
try
{
conn = new
SqlConnection(ConfigurationManager.ConnectionStrings["conRsTool"].ConnectionString.ToString());
conn.Open();
SqlCommand cmd = new SqlCommand("rsTemplateSetting_Del", conn);
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd.ExecuteNonQuery();
for (int i = 0; i < this.dgvSaveOptions.Rows.Count; i++)
{
string isEmptyStr = string.Empty;
for (int k = 0; k < dgvSaveOptions.Rows[i].Cells.Count; k++)
{
isEmptyStr += dgvSaveOptions.Rows[i].Cells[k].EditedFormattedValue;
}
if (string.IsNullOrEmpty(isEmptyStr)) continue;
string sequencePrifx = string.Empty;
string pupulateWithSN = string.Empty;
SqlCommand cmd2 = new SqlCommand("rsTemplateSetting_Ins", conn);
cmd2.CommandType = CommandType.StoredProcedure;
cmd2.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
sequencePrifx = this.dgvSaveOptions.Rows[i].Cells[12].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[12].Value.ToString();
if (string.IsNullOrEmpty(sequencePrifx))
this.dgvSaveOptions.Rows[i].Cells[12].ErrorText = "nullable, but unexpected result would happen when save transaction/view transaction.";
else
this.dgvSaveOptions.Rows[i].Cells[12].ErrorText = string.Empty;
pupulateWithSN = this.dgvSaveOptions.Rows[i].Cells[13].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[13].Value.ToString();
if (string.IsNullOrEmpty(pupulateWithSN))
this.dgvSaveOptions.Rows[i].Cells[13].ErrorText = "nullable, but unexpected result would happen when save transaction/view transaction.";
else
this.dgvSaveOptions.Rows[i].Cells[13].ErrorText = string.Empty;
cmd2.Parameters.Add(new SqlParameter("@CriteriaName1", this.dgvSaveOptions.Rows[i].Cells[1].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[1].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CellReference1", this.dgvSaveOptions.Rows[i].Cells[2].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[2].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CriteriaName2", this.dgvSaveOptions.Rows[i].Cells[3].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[3].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CellReference2", this.dgvSaveOptions.Rows[i].Cells[4].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[4].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CriteriaName3", this.dgvSaveOptions.Rows[i].Cells[5].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[5].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CellReference3", this.dgvSaveOptions.Rows[i].Cells[6].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[6].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CriteriaName4", this.dgvSaveOptions.Rows[i].Cells[7].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[7].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CellReference4", this.dgvSaveOptions.Rows[i].Cells[8].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[8].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CriteriaName5", this.dgvSaveOptions.Rows[i].Cells[9].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[9].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@CellReference5", this.dgvSaveOptions.Rows[i].Cells[10].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[10].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@OpenTransUponSave", this.dgvSaveOptions.Rows[i].Cells[11].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[11].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@Reference", this.dgvSaveOptions.Rows[i].Cells[0].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[0].Value.ToString()));
if (!string.IsNullOrEmpty(sequencePrifx))
cmd2.Parameters.Add(new SqlParameter("@UseSequenceNumbering", true));
else
cmd2.Parameters.Add(new SqlParameter("@UseSequenceNumbering", false));
cmd2.Parameters.Add(new SqlParameter("@SequencePrefix", sequencePrifx));
cmd2.Parameters.Add(new SqlParameter("@PopulateCell", pupulateWithSN));
cmd2.Parameters.Add(new SqlParameter("@PDFFolder", this.dgvSaveOptions.Rows[i].Cells[14].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[14].Value.ToString()));
cmd2.Parameters.Add(new SqlParameter("@PDFName", this.dgvSaveOptions.Rows[i].Cells[15].Value == null ? "" : this.dgvSaveOptions.Rows[i].Cells[15].Value.ToString()));
cmd2.ExecuteNonQuery();
cmd2.Parameters.Clear();
}
}
finally
{
if (conn != null)
{
conn.Close();
}
}
if (sender != null)
Ribbon2._MyOutputCustomTaskPane.Visible = false;
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="ws"></param>
public void SaveTransUpd(object sender, Microsoft.Office.Interop.Excel.Worksheet ws)
{
searchStatus = string.Empty;
string cellName = string.Empty;
updateStatus = string.Empty;
updateStatusForPost = string.Empty;
List<string> LineIndicatorList = new List<string>();
List<string> startInCellList = new List<string>();
if (!string.IsNullOrEmpty(SessionInfo.UserInfo.File_ftid))
{
SqlConnection conn = null;
SqlDataReader rdr = null;
try
{
conn = new SqlConnection(ConfigurationManager.ConnectionStrings["conRsTool"].ConnectionString.ToString());
conn.Open();
SqlCommand cmd = new SqlCommand("rsTemplateJournal_Del", conn);
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd.Parameters.Add(new SqlParameter("@Type", "2"));
rdr = cmd.ExecuteReader();
rdr.Close();
TransUpdFinallist.Clear();
SqlCommand cmd2 = new SqlCommand("rsTemplateJournal_Ins", conn);
cmd2.CommandType = CommandType.StoredProcedure;
for (int j = 0; j < dgvLD.Rows.Count; j++)
{
bool iscontinue = true;
searchStatus = "";
updateStatus = "";
for (int i = 0; i < dgvLD.Columns.Count; i++)
{
if (dgvLD.Rows[j].Cells[i].Style.BackColor == Color.Aqua)
{
iscontinue = false;
updateStatus += dgvLD.Columns[i].Name + ",";
updateStatusForPost += dgvLD.Columns[i].Name + ",";
}
}
if (iscontinue)
continue;
string isEmptyStr = string.Empty;
for (int k = 0; k < dgvLD.Rows[j].Cells.Count; k++)
{
isEmptyStr += dgvLD.Rows[j].Cells[k].EditedFormattedValue;
if (k > 6 && (dgvLD.Rows[j].Cells[k].Style.BackColor != Color.Aqua))
{
object tmp = dgvLD.Rows[j].Cells[k].EditedFormattedValue;
if (!string.IsNullOrEmpty(tmp.ToString()))
searchStatus += dgvLD.Columns[k].DataPropertyName + ",";
}
}
if (string.IsNullOrEmpty(isEmptyStr)) continue;
ExcelAddIn4.Common2.Specialist re = new ExcelAddIn4.Common2.Specialist();
re.Actions = new Common2.Actions();
string startCell = string.Empty;
string journalN = string.Empty;
string journalLN = string.Empty;
string abt = string.Empty;
string apsa = string.Empty;
re.Reference = this.dgvLD.Rows[j].Cells[0].Value == null ? "" : this.dgvLD.Rows[j].Cells[0].Value.ToString().Replace(" ", "");
re.SaveReference = this.dgvLD.Rows[j].Cells[1].Value == null ? "" : this.dgvLD.Rows[j].Cells[1].Value.ToString();
re.BalanceBy = this.dgvLD.Rows[j].Cells[2].Value == null ? "" : this.dgvLD.Rows[j].Cells[2].Value.ToString();
abt = this.dgvLD.Rows[j].Cells[3].Value == null ? "" : this.dgvLD.Rows[j].Cells[3].Value.ToString();
apsa = this.dgvLD.Rows[j].Cells[4].Value == null ? "" : this.dgvLD.Rows[j].Cells[4].Value.ToString();
re.LineIndicator = this.dgvLD.Rows[j].Cells[5].Value == null ? "" : this.dgvLD.Rows[j].Cells[5].Value.ToString();
startCell = this.dgvLD.Rows[j].Cells[6].Value == null ? "" : this.dgvLD.Rows[j].Cells[6].Value.ToString();
re.StartInCell = startCell;
re.populatecellwithJN = this.dgvLD.Rows[j].Cells[7].Value == null ? "" : this.dgvLD.Rows[j].Cells[7].Value.ToString();
journalN = this.dgvLD.Rows[j].Cells[8].Value == null ? "" : this.dgvLD.Rows[j].Cells[8].Value.ToString();
re.JournalNumber = journalN;
journalLN = this.dgvLD.Rows[j].Cells[9].Value == null ? "" : this.dgvLD.Rows[j].Cells[9].Value.ToString();
re.JournalLineNumber = journalLN;
re.Ledger = this.dgvLD.Rows[j].Cells[10].Value == null ? "" : this.dgvLD.Rows[j].Cells[10].Value.ToString();
re.AccountCode = this.dgvLD.Rows[j].Cells[11].Value == null ? "" : this.dgvLD.Rows[j].Cells[11].Value.ToString();
re.AccountingPeriod = this.dgvLD.Rows[j].Cells[12].Value == null ? "" : this.dgvLD.Rows[j].Cells[12].Value.ToString();
re.TransactionDate = this.dgvLD.Rows[j].Cells[13].Value == null ? "" : this.dgvLD.Rows[j].Cells[13].Value.ToString();
re.DueDate = this.dgvLD.Rows[j].Cells[14].Value == null ? "" : this.dgvLD.Rows[j].Cells[14].Value.ToString();
re.JournalType = this.dgvLD.Rows[j].Cells[15].Value == null ? "" : this.dgvLD.Rows[j].Cells[15].Value.ToString();
re.JournalSource = this.dgvLD.Rows[j].Cells[16].Value == null ? "" : this.dgvLD.Rows[j].Cells[16].Value.ToString();
re.TransactionReference = this.dgvLD.Rows[j].Cells[17].Value == null ? "" : this.dgvLD.Rows[j].Cells[17].Value.ToString();
re.Description = this.dgvLD.Rows[j].Cells[18].Value == null ? "" : this.dgvLD.Rows[j].Cells[18].Value.ToString();
re.AllocationMarker = this.dgvLD.Rows[j].Cells[19].Value == null ? "" : this.dgvLD.Rows[j].Cells[19].Value.ToString();
re.AnalysisCode1 = this.dgvLD.Rows[j].Cells[20].Value == null ? "" : this.dgvLD.Rows[j].Cells[20].Value.ToString();
re.AnalysisCode2 = this.dgvLD.Rows[j].Cells[21].Value == null ? "" : this.dgvLD.Rows[j].Cells[21].Value.ToString();
re.AnalysisCode3 = this.dgvLD.Rows[j].Cells[22].Value == null ? "" : this.dgvLD.Rows[j].Cells[22].Value.ToString();
re.AnalysisCode4 = this.dgvLD.Rows[j].Cells[23].Value == null ? "" : this.dgvLD.Rows[j].Cells[23].Value.ToString();
re.AnalysisCode5 = this.dgvLD.Rows[j].Cells[24].Value == null ? "" : this.dgvLD.Rows[j].Cells[24].Value.ToString();
re.AnalysisCode6 = this.dgvLD.Rows[j].Cells[25].Value == null ? "" : this.dgvLD.Rows[j].Cells[25].Value.ToString();
re.AnalysisCode7 = this.dgvLD.Rows[j].Cells[26].Value == null ? "" : this.dgvLD.Rows[j].Cells[26].Value.ToString();
re.AnalysisCode8 = this.dgvLD.Rows[j].Cells[27].Value == null ? "" : this.dgvLD.Rows[j].Cells[27].Value.ToString();
re.AnalysisCode9 = this.dgvLD.Rows[j].Cells[28].Value == null ? "" : this.dgvLD.Rows[j].Cells[28].Value.ToString();
re.AnalysisCode10 = this.dgvLD.Rows[j].Cells[29].Value == null ? "" : this.dgvLD.Rows[j].Cells[29].Value.ToString();
re.GenDesc1 = this.dgvLD.Rows[j].Cells[30].Value == null ? "" : this.dgvLD.Rows[j].Cells[30].Value.ToString();
re.GenDesc2 = this.dgvLD.Rows[j].Cells[31].Value == null ? "" : this.dgvLD.Rows[j].Cells[31].Value.ToString();
re.GenDesc3 = this.dgvLD.Rows[j].Cells[32].Value == null ? "" : this.dgvLD.Rows[j].Cells[32].Value.ToString();
re.GenDesc4 = this.dgvLD.Rows[j].Cells[33].Value == null ? "" : this.dgvLD.Rows[j].Cells[33].Value.ToString();
re.GenDesc5 = this.dgvLD.Rows[j].Cells[34].Value == null ? "" : this.dgvLD.Rows[j].Cells[34].Value.ToString();
re.GenDesc6 = this.dgvLD.Rows[j].Cells[35].Value == null ? "" : this.dgvLD.Rows[j].Cells[35].Value.ToString();
re.GenDesc7 = this.dgvLD.Rows[j].Cells[36].Value == null ? "" : this.dgvLD.Rows[j].Cells[36].Value.ToString();
re.GenDesc8 = this.dgvLD.Rows[j].Cells[37].Value == null ? "" : this.dgvLD.Rows[j].Cells[37].Value.ToString();
re.GenDesc9 = this.dgvLD.Rows[j].Cells[38].Value == null ? "" : this.dgvLD.Rows[j].Cells[38].Value.ToString();
re.GenDesc10 = this.dgvLD.Rows[j].Cells[39].Value == null ? "" : this.dgvLD.Rows[j].Cells[39].Value.ToString();
re.GenDesc11 = this.dgvLD.Rows[j].Cells[40].Value == null ? "" : this.dgvLD.Rows[j].Cells[40].Value.ToString();
re.GenDesc12 = this.dgvLD.Rows[j].Cells[41].Value == null ? "" : this.dgvLD.Rows[j].Cells[41].Value.ToString();
re.GenDesc13 = this.dgvLD.Rows[j].Cells[42].Value == null ? "" : this.dgvLD.Rows[j].Cells[42].Value.ToString();
re.GenDesc14 = this.dgvLD.Rows[j].Cells[43].Value == null ? "" : this.dgvLD.Rows[j].Cells[43].Value.ToString();
re.GenDesc15 = this.dgvLD.Rows[j].Cells[44].Value == null ? "" : this.dgvLD.Rows[j].Cells[44].Value.ToString();
re.GenDesc16 = this.dgvLD.Rows[j].Cells[45].Value == null ? "" : this.dgvLD.Rows[j].Cells[45].Value.ToString();
re.GenDesc17 = this.dgvLD.Rows[j].Cells[46].Value == null ? "" : this.dgvLD.Rows[j].Cells[46].Value.ToString();
re.GenDesc18 = this.dgvLD.Rows[j].Cells[47].Value == null ? "" : this.dgvLD.Rows[j].Cells[47].Value.ToString();
re.GenDesc19 = this.dgvLD.Rows[j].Cells[48].Value == null ? "" : this.dgvLD.Rows[j].Cells[48].Value.ToString();
re.GenDesc20 = this.dgvLD.Rows[j].Cells[49].Value == null ? "" : this.dgvLD.Rows[j].Cells[49].Value.ToString();
re.GenDesc21 = this.dgvLD.Rows[j].Cells[50].Value == null ? "" : this.dgvLD.Rows[j].Cells[50].Value.ToString();
re.GenDesc22 = this.dgvLD.Rows[j].Cells[51].Value == null ? "" : this.dgvLD.Rows[j].Cells[51].Value.ToString();
re.GenDesc23 = this.dgvLD.Rows[j].Cells[52].Value == null ? "" : this.dgvLD.Rows[j].Cells[52].Value.ToString();
re.GenDesc24 = this.dgvLD.Rows[j].Cells[53].Value == null ? "" : this.dgvLD.Rows[j].Cells[53].Value.ToString();
re.GenDesc25 = this.dgvLD.Rows[j].Cells[54].Value == null ? "" : this.dgvLD.Rows[j].Cells[54].Value.ToString();
re.TransactionAmount = this.dgvLD.Rows[j].Cells[55].Value == null ? "" : this.dgvLD.Rows[j].Cells[55].Value.ToString();
re.CurrencyCode = this.dgvLD.Rows[j].Cells[56].Value == null ? "" : this.dgvLD.Rows[j].Cells[56].Value.ToString();
re.DebitCredit = "";
re.BaseAmount = this.dgvLD.Rows[j].Cells[57].Value == null ? "" : this.dgvLD.Rows[j].Cells[57].Value.ToString();
re.Base2ReportingAmount = this.dgvLD.Rows[j].Cells[58].Value == null ? "" : this.dgvLD.Rows[j].Cells[58].Value.ToString();
re.Value4Amount = this.dgvLD.Rows[j].Cells[59].Value == null ? "" : this.dgvLD.Rows[j].Cells[59].Value.ToString();
if (string.IsNullOrEmpty(re.ToString())) continue;
if (string.IsNullOrEmpty(re.LineIndicator))
this.dgvLD.Rows[j].Cells[5].ErrorText = "Not null.";
else
this.dgvLD.Rows[j].Cells[5].ErrorText = string.Empty;
cmd2.Parameters.Add(new SqlParameter("@Ledger", re.Ledger));
cmd2.Parameters.Add(new SqlParameter("@ft_Account", re.AccountCode));
cmd2.Parameters.Add(new SqlParameter("@Period", re.AccountingPeriod));
cmd2.Parameters.Add(new SqlParameter("@TransDate", re.TransactionDate));
cmd2.Parameters.Add(new SqlParameter("@DueDate", re.DueDate));
cmd2.Parameters.Add(new SqlParameter("@JrnlType", re.JournalType));
cmd2.Parameters.Add(new SqlParameter("@JrnlSource", re.JournalSource));
cmd2.Parameters.Add(new SqlParameter("@TransRef", re.TransactionReference));
cmd2.Parameters.Add(new SqlParameter("@Description", re.Description));
cmd2.Parameters.Add(new SqlParameter("@AlloctnMarker", re.AllocationMarker));
cmd2.Parameters.Add(new SqlParameter("@LA1", re.AnalysisCode1));
cmd2.Parameters.Add(new SqlParameter("@LA2", re.AnalysisCode2));
cmd2.Parameters.Add(new SqlParameter("@LA3", re.AnalysisCode3));
cmd2.Parameters.Add(new SqlParameter("@LA4", re.AnalysisCode4));
cmd2.Parameters.Add(new SqlParameter("@LA5", re.AnalysisCode5));
cmd2.Parameters.Add(new SqlParameter("@LA6", re.AnalysisCode6));
cmd2.Parameters.Add(new SqlParameter("@LA7", re.AnalysisCode7));
cmd2.Parameters.Add(new SqlParameter("@LA8", re.AnalysisCode8));
cmd2.Parameters.Add(new SqlParameter("@LA9", re.AnalysisCode9));
cmd2.Parameters.Add(new SqlParameter("@LA10", re.AnalysisCode10));
cmd2.Parameters.Add(new SqlParameter("@GenDesc1", re.GenDesc1));
cmd2.Parameters.Add(new SqlParameter("@GenDesc2", re.GenDesc2));
cmd2.Parameters.Add(new SqlParameter("@GenDesc3", re.GenDesc3));
cmd2.Parameters.Add(new SqlParameter("@GenDesc4", re.GenDesc4));
cmd2.Parameters.Add(new SqlParameter("@GenDesc5", re.GenDesc5));
cmd2.Parameters.Add(new SqlParameter("@GenDesc6", re.GenDesc6));
cmd2.Parameters.Add(new SqlParameter("@GenDesc7", re.GenDesc7));
cmd2.Parameters.Add(new SqlParameter("@GenDesc8", re.GenDesc8));
cmd2.Parameters.Add(new SqlParameter("@GenDesc9", re.GenDesc9));
cmd2.Parameters.Add(new SqlParameter("@GenDesc10", re.GenDesc10));
cmd2.Parameters.Add(new SqlParameter("@GenDesc11", re.GenDesc11));
cmd2.Parameters.Add(new SqlParameter("@GenDesc12", re.GenDesc12));
cmd2.Parameters.Add(new SqlParameter("@GenDesc13", re.GenDesc13));
cmd2.Parameters.Add(new SqlParameter("@GenDesc14", re.GenDesc14));
cmd2.Parameters.Add(new SqlParameter("@GenDesc15", re.GenDesc15));
cmd2.Parameters.Add(new SqlParameter("@GenDesc16", re.GenDesc16));
cmd2.Parameters.Add(new SqlParameter("@GenDesc17", re.GenDesc17));
cmd2.Parameters.Add(new SqlParameter("@GenDesc18", re.GenDesc18));
cmd2.Parameters.Add(new SqlParameter("@GenDesc19", re.GenDesc19));
cmd2.Parameters.Add(new SqlParameter("@GenDesc20", re.GenDesc20));
cmd2.Parameters.Add(new SqlParameter("@GenDesc21", re.GenDesc21));
cmd2.Parameters.Add(new SqlParameter("@GenDesc22", re.GenDesc22));
cmd2.Parameters.Add(new SqlParameter("@GenDesc23", re.GenDesc23));
cmd2.Parameters.Add(new SqlParameter("@GenDesc24", re.GenDesc24));
cmd2.Parameters.Add(new SqlParameter("@GenDesc25", re.GenDesc25));
cmd2.Parameters.Add(new SqlParameter("@TransAmount", re.TransactionAmount));
cmd2.Parameters.Add(new SqlParameter("@Currency", re.CurrencyCode));
cmd2.Parameters.Add(new SqlParameter("@BaseAmount", re.BaseAmount));
cmd2.Parameters.Add(new SqlParameter("@2ndBase", re.Base2ReportingAmount));
cmd2.Parameters.Add(new SqlParameter("@4thAmount", re.Value4Amount));
cmd2.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd2.Parameters.Add(new SqlParameter("@LineIndicator", re.LineIndicator));
cmd2.Parameters.Add(new SqlParameter("@StartinginCell", startCell));
cmd2.Parameters.Add(new SqlParameter("@BalanceBy", re.BalanceBy));
cmd2.Parameters.Add(new SqlParameter("@PopWithJNNumber", re.populatecellwithJN));
cmd2.Parameters.Add(new SqlParameter("@Reference", re.Reference));
cmd2.Parameters.Add(new SqlParameter("@SaveReference", re.SaveReference));
cmd2.Parameters.Add(new SqlParameter("@JournalNumber", journalN));
cmd2.Parameters.Add(new SqlParameter("@JournalLineNumber", journalLN));
cmd2.Parameters.Add(new SqlParameter("@InputFields", searchStatus));
cmd2.Parameters.Add(new SqlParameter("@UpdateFields", updateStatus));
cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy1", ""));
cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy2", ""));
cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy3", ""));
cmd2.Parameters.Add(new SqlParameter("@ConsolidateBy4", ""));
cmd2.Parameters.Add(new SqlParameter("@Type", "2"));//0.post 1.consolidation 2.update
cmd2.Parameters.Add(new SqlParameter("@AllowBalTrans", abt));
cmd2.Parameters.Add(new SqlParameter("@AllowPostSuspAcco", apsa));
rdr = cmd2.ExecuteReader();
rdr.Close();
cmd2.Parameters.Clear();
if (!string.IsNullOrEmpty(re.LineIndicator) && !string.IsNullOrEmpty(startCell))
{
TransUpdFinallist.Add(re);
LineIndicatorList.Add(re.LineIndicator);
startInCellList.Add(startCell);
}
}
this.Invalidate();
//if (LineIndicatorList.Count == 0 && TransUpdFinallist.Count != 0)
//{
//throw new Exception("No data found for specified Line Indicator(s)!");
//}
if (sender == null)
AddLineDetailEntityListIntoFinalListForTransUpd(TransUpdFinallist, LineIndicatorList, startInCellList, ws, ref TransUpdFinallist);
}
catch (Exception ex)
{
LogHelper.WriteLog(typeof(OutputContainer), ex.Message + " - Data Error in Journal tab, Output settings !" + " Output settings error");
throw new Exception(ex.Message + " - Data Error in Journal tab, Output settings !");
}
finally
{
if (conn != null)
{
conn.Close();
}
if (rdr != null)
{
rdr.Close();
}
}
}
Ribbon2._MyOutputCustomTaskPane.Visible = false;
}
/// <summary>
///
/// </summary>
/// <returns></returns>
private bool isTransUpdate()
{
bool isUpdate = false;
for (int i = 0; i < dgvLD.Columns.Count; i++)
if (dgvLD.SelectedRows[0].Cells[i].Style.BackColor == Color.Aqua)
isUpdate = true;
return isUpdate;
}
/// <summary>
///
/// </summary>
private void ClearSession()
{
SessionInfo.UserInfo.Criteria1 = "";
SessionInfo.UserInfo.Criteria2 = "";
SessionInfo.UserInfo.Criteria3 = "";
SessionInfo.UserInfo.Criteria4 = "";
SessionInfo.UserInfo.Criteria5 = "";
SessionInfo.UserInfo.CellReference1 = "";
SessionInfo.UserInfo.CellReference2 = "";
SessionInfo.UserInfo.CellReference3 = "";
SessionInfo.UserInfo.CellReference4 = "";
SessionInfo.UserInfo.CellReference5 = "";
SessionInfo.UserInfo.UseCriteria = false;
SessionInfo.UserInfo.OpentransuponSave = false;
SessionInfo.UserInfo.SequencePrefix = "";
SessionInfo.UserInfo.UseSequenceNumbering = "0";
SessionInfo.UserInfo.PopulateCell = "";
}
/// <summary>
///
/// </summary>
public void SetSession()
{
if (string.IsNullOrEmpty(SessionInfo.UserInfo.CurrentSaveRef)) return;
ClearSession();
DataTable dt = ft.GetReportCriteriaByRef(SessionInfo.UserInfo.File_ftid, SessionInfo.UserInfo.CurrentSaveRef);
if (dt.Rows.Count == 0)
{
MessageBox.Show("Save Reference " + SessionInfo.UserInfo.CurrentSaveRef + " error!"); return;
}
SessionInfo.UserInfo.OpentransuponSave = (bool)dt.Rows[0]["OpenTransUponSave"];
for (int i = 0; i < dt.Rows.Count; i++)
{
try
{
if (!string.IsNullOrEmpty(dt.Rows[i]["SequencePrefix"].ToString()))
{
SessionInfo.UserInfo.SequencePrefix = dt.Rows[i]["SequencePrefix"].ToString();
if (!string.IsNullOrEmpty(SessionInfo.UserInfo.SequencePrefix))
SessionInfo.UserInfo.UseSequenceNumbering = "1";
else
SessionInfo.UserInfo.UseSequenceNumbering = "0";
}
if (!string.IsNullOrEmpty(dt.Rows[i]["PopulateCell"].ToString()))
SessionInfo.UserInfo.PopulateCell = dt.Rows[i]["PopulateCell"].ToString();
if (!string.IsNullOrEmpty(dt.Rows[i]["CriteriaName1"].ToString()))
{
SessionInfo.UserInfo.Criteria1 = dt.Rows[i]["CriteriaName1"].ToString();
var cellvalue = "";
try
{
cellvalue = ft.GetValueOfAddress(dt.Rows[i]["CellReference1"].ToString());
}
catch { cellvalue = dt.Rows[i]["CellReference1"].ToString(); }
SessionInfo.UserInfo.CellReference1 = cellvalue;
SessionInfo.UserInfo.UseCriteria = true;
}
else
{
SessionInfo.UserInfo.Criteria1 = "";
SessionInfo.UserInfo.CellReference1 = "";
}
if (!string.IsNullOrEmpty(dt.Rows[i]["CriteriaName2"].ToString()))
{
SessionInfo.UserInfo.Criteria2 = dt.Rows[i]["CriteriaName2"].ToString();
var cellvalue = "";
try
{
cellvalue = ft.GetValueOfAddress(dt.Rows[i]["CellReference2"].ToString());
}
catch { cellvalue = dt.Rows[i]["CellReference2"].ToString(); }
SessionInfo.UserInfo.CellReference2 = cellvalue;
SessionInfo.UserInfo.UseCriteria = true;
}
else
{
SessionInfo.UserInfo.Criteria2 = "";
SessionInfo.UserInfo.CellReference2 = "";
}
if (!string.IsNullOrEmpty(dt.Rows[i]["CriteriaName3"].ToString()))
{
SessionInfo.UserInfo.Criteria3 = dt.Rows[i]["CriteriaName3"].ToString();
var cellvalue = "";
try
{
cellvalue = ft.GetValueOfAddress(dt.Rows[i]["CellReference3"].ToString());
}
catch { cellvalue = dt.Rows[i]["CellReference3"].ToString(); }
SessionInfo.UserInfo.CellReference3 = cellvalue;
SessionInfo.UserInfo.UseCriteria = true;
}
else
{
SessionInfo.UserInfo.Criteria3 = "";
SessionInfo.UserInfo.CellReference3 = "";
}
if (!string.IsNullOrEmpty(dt.Rows[i]["CriteriaName4"].ToString()))
{
SessionInfo.UserInfo.Criteria4 = dt.Rows[i]["CriteriaName4"].ToString();
var cellvalue = "";
try
{
cellvalue = ft.GetValueOfAddress(dt.Rows[i]["CellReference4"].ToString());
}
catch { cellvalue = dt.Rows[i]["CellReference4"].ToString(); }
SessionInfo.UserInfo.CellReference4 = cellvalue;
SessionInfo.UserInfo.UseCriteria = true;
}
else
{
SessionInfo.UserInfo.Criteria4 = "";
SessionInfo.UserInfo.CellReference4 = "";
}
if (!string.IsNullOrEmpty(dt.Rows[i]["CriteriaName5"].ToString()))
{
SessionInfo.UserInfo.Criteria5 = dt.Rows[i]["CriteriaName5"].ToString();
var cellvalue = "";
try
{
cellvalue = ft.GetValueOfAddress(dt.Rows[i]["CellReference5"].ToString());
}
catch { cellvalue = dt.Rows[i]["CellReference5"].ToString(); }
SessionInfo.UserInfo.CellReference5 = cellvalue;
SessionInfo.UserInfo.UseCriteria = true;
}
else
{
SessionInfo.UserInfo.Criteria5 = "";
SessionInfo.UserInfo.CellReference5 = "";
}
}
catch { }
}
}
        /// <summary>
        /// Runs a test posting for the journal Reference selected in dgvLD.
        /// Requires a selected row; routes to the transaction-update test when the
        /// row has update (Aqua) cells; refuses to run when the document appears to
        /// have been posted already. Otherwise it builds the post preview via
        /// Save/SaveCons inside the XMLPostFrm progress window.
        /// </summary>
        /// <param name="sender">Event source; forwarded to btnTestTransUpd_Click when routing.</param>
        /// <param name="e">Event arguments (unused).</param>
        private void btnTestJournal_Click(object sender, EventArgs e)
        {
            // A full row selection (row header click) is required to know which Reference to test.
            if (dgvLD.SelectedRows.Count == 0)
            {
                MessageBox.Show("Please click the row header number ( before Ref column ) to choose a certain Reference number and enjoy your test ! ", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                return;
            }
            else
            {
                // Cell 0 = Reference, cell 2 = BalanceBy; spaces are stripped before storing in the session.
                if (dgvLD.SelectedRows[0].Cells[0].Value != null)
                    SessionInfo.UserInfo.CurrentRef = dgvLD.SelectedRows[0].Cells[0].Value.ToString().Replace(" ", "");
                if (dgvLD.SelectedRows[0].Cells[2].Value != null)
                    SessionInfo.UserInfo.BalanceBy = dgvLD.SelectedRows[0].Cells[2].Value.ToString().Replace(" ", "");
            }
            // Rows marked with update (Aqua) cells follow the transaction-update test path instead.
            if (isTransUpdate())
            {
                btnTestTransUpd_Click(sender, null);
                return;
            }
            // A GUID-named cache file plus an existing journal number indicate the document was already posted.
            if (ft.IsGUID(Path.GetFileNameWithoutExtension(SessionInfo.UserInfo.CachePath)) && !string.IsNullOrEmpty(ft.ProcessJournalNumber()))//(SessionInfo.UserInfo.CachePath != SessionInfo.UserInfo.FilePath)
            {
                MessageBox.Show("Can't be changed! This document has been Posted! ", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                LogHelper.WriteLog(typeof(OutputContainer), "Can't be changed! This document has been Posted! ");
                return;
            }
            DateTime starttime = DateTime.Now;
            // Recreate the post-progress window showing only its progress panel (panel3).
            if (Ribbon2.xpf != null)
                Ribbon2.xpf.Dispose();
            Ribbon2.xpf = new XMLPostFrm();
            Ribbon2.xpf.panel3.Visible = true;
            Ribbon2.xpf.panel1.Visible = false;
            Ribbon2.xpf.panel2.Visible = false;
            Ribbon2.xpf.panel3.Dock = DockStyle.Fill;
            Ribbon2.xpf.ControlBox = false;
            try
            {
                Globals.ThisAddIn.Application.DisplayAlerts = false;
                Ribbon2.wsRrigin = (Microsoft.Office.Interop.Excel.Worksheet)Globals.ThisAddIn.Application.ActiveWorkbook.ActiveSheet;
                // Find the last used column and row of the active sheet to bound the scan.
                var lastColumn = Ribbon2.wsRrigin.Cells.Find("*", Ribbon2.wsRrigin.Cells[1, 1], Microsoft.Office.Interop.Excel.XlFindLookIn.xlFormulas, Microsoft.Office.Interop.Excel.XlLookAt.xlPart, Microsoft.Office.Interop.Excel.XlSearchOrder.xlByColumns, Microsoft.Office.Interop.Excel.XlSearchDirection.xlPrevious, false, Type.Missing, Type.Missing);
                Ribbon2.LastColumnName = Finance_Tools.RemoveNumber(lastColumn.Address).Replace("$", "");
                Finance_Tools.MaxColumnCount = lastColumn.Column;
                var lastrow = Ribbon2.wsRrigin.Cells.Find("*", Ribbon2.wsRrigin.Cells[1, 1], Microsoft.Office.Interop.Excel.XlFindLookIn.xlFormulas, Microsoft.Office.Interop.Excel.XlLookAt.xlPart, Microsoft.Office.Interop.Excel.XlSearchOrder.xlByRows, Microsoft.Office.Interop.Excel.XlSearchDirection.xlPrevious, false, Type.Missing, Type.Missing);
                Ribbon2.LastRowNumber = Finance_Tools.RemoveNotNumber(lastrow.Address);
                Globals.ThisAddIn.Application.ScreenUpdating = false;
                Ribbon2.xpf.Show();
                XMLPostFrm.richTextBox1.Text += "Error List :\r\n";
                // Run the save and consolidation passes that produce the test output.
                Save(null, Ribbon2.wsRrigin);
                SaveCons(null, Ribbon2.wsRrigin);
                SetSession();
                Globals.ThisAddIn.Application.ScreenUpdating = true;
                Globals.ThisAddIn.Application.DisplayAlerts = true;
                // Anything beyond the "Error List :" header means errors were reported; log them.
                if (XMLPostFrm.richTextBox1.Text.Length > 21)
                {
                    LogHelper.WriteLog(typeof(OutputContainer), XMLPostFrm.richTextBox1.Text + " - Post Journal Processing error , Template:" + SessionInfo.UserInfo.FileName);
                }
            }
            catch (Exception ex)
            {
                if (ex.ToString().Contains("Clipboard"))
                {
                    MessageBox.Show("Clipboard not ready, please try again.", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                else
                {
                    MessageBox.Show(ex.ToString(), "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                LogHelper.WriteLog(typeof(OutputContainer), ex.Message + "Post error");
                Ribbon2.xpf.Dispose();
                Globals.ThisAddIn.Application.ScreenUpdating = true;
                // Reset the clipboard so Excel's copy state cannot stay wedged after the failure.
                Clipboard.SetText("\r\n");
            }
            finally
            {
                // Restore the window to its normal layout and report how long the run took.
                Ribbon2.xpf.Focus();
                Ribbon2.xpf.bddata();
                Ribbon2.xpf.panel3.Visible = false;
                Ribbon2.xpf.panel1.Visible = true;
                Ribbon2.xpf.panel2.Visible = true;
                Ribbon2.xpf.ControlBox = true;
                DateTime stoptime = DateTime.Now;
                string costtime = Finance_Tools.DateDiff(starttime, stoptime);
                Globals.ThisAddIn.Application.StatusBar = "latest posting process costs " + costtime;
                GC.Collect();
            }
        }
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void CTF_btnSave_Click(object sender, EventArgs e)
{
try
{
if (cbXMLOrText.Text == "XML")
SaveXML(sender, null);
else
SaveCTF(sender, null);
}
catch (Exception ex)
{
MessageBox.Show(ex.ToString(), "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
Ribbon2._MyOutputCustomTaskPane.Visible = false;
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="ws"></param>
public void SaveXML(object sender, Microsoft.Office.Interop.Excel.Worksheet ws)
{
string HeaderText = string.Empty;
string HeaderValue = string.Empty;
List<string> LineIndicatorList = new List<string>();
List<string> startInCellList = new List<string>();
for (int k = 0; k < dgvCreateTextFile.Columns.Count; k++)
{
HeaderText += dgvCreateTextFile.Columns[k].HeaderText + ",";
HeaderValue += dgvCreateTextFile.Columns[k].Name + ",";
}
if (!string.IsNullOrEmpty(SessionInfo.UserInfo.File_ftid))
{
SqlConnection conn = null;
SqlDataReader rdr = null;
try
{
conn = new
SqlConnection(ConfigurationManager.ConnectionStrings["conRsTool"].ConnectionString.ToString());
conn.Open();
SqlCommand cmd = new SqlCommand("rsTemplateXMLTextFileDGV_DelByComName", conn);
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd.Parameters.Add(new SqlParameter("@SunComponent", cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(","))));
cmd.Parameters.Add(new SqlParameter("@SunMethod", cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1)));
rdr = cmd.ExecuteReader();
rdr.Close();
finallistCTF.Clear();
SqlCommand cmd2 = new SqlCommand("rsTemplateXMLTextFileDGV_Ins", conn);
cmd2.CommandType = CommandType.StoredProcedure;
for (int i = 0; i < this.dgvCreateTextFile.Rows.Count; i++)
{
string isEmptyStr = string.Empty;
for (int k = 0; k < dgvCreateTextFile.Rows[i].Cells.Count; k++)
{
isEmptyStr += dgvCreateTextFile.Rows[i].Cells[k].EditedFormattedValue;
}
if (string.IsNullOrEmpty(isEmptyStr)) continue;
RowCreateTextFile re = new RowCreateTextFile();
re.ReferenceNumber = this.dgvCreateTextFile.Rows[i].Cells[0].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[0].Value.ToString().Replace(" ", "");
re.LineIndicator = this.dgvCreateTextFile.Rows[i].Cells[1].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[1].Value.ToString();
re.StartinginCell = this.dgvCreateTextFile.Rows[i].Cells[2].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[2].Value.ToString();
re.Column1 = this.dgvCreateTextFile.Columns.Count > 3 ? (this.dgvCreateTextFile.Rows[i].Cells[3].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[3].EditedFormattedValue.ToString()) : "";
re.Column2 = this.dgvCreateTextFile.Columns.Count > 4 ? (this.dgvCreateTextFile.Rows[i].Cells[4].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[4].EditedFormattedValue.ToString()) : "";
re.Column3 = this.dgvCreateTextFile.Columns.Count > 5 ? (this.dgvCreateTextFile.Rows[i].Cells[5].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[5].EditedFormattedValue.ToString()) : "";
re.Column4 = this.dgvCreateTextFile.Columns.Count > 6 ? (this.dgvCreateTextFile.Rows[i].Cells[6].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[6].EditedFormattedValue.ToString()) : "";
re.Column5 = this.dgvCreateTextFile.Columns.Count > 7 ? (this.dgvCreateTextFile.Rows[i].Cells[7].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[7].EditedFormattedValue.ToString()) : "";
re.Column6 = this.dgvCreateTextFile.Columns.Count > 8 ? (this.dgvCreateTextFile.Rows[i].Cells[8].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[8].EditedFormattedValue.ToString()) : "";
re.Column7 = this.dgvCreateTextFile.Columns.Count > 9 ? (this.dgvCreateTextFile.Rows[i].Cells[9].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[9].EditedFormattedValue.ToString()) : "";
re.Column8 = this.dgvCreateTextFile.Columns.Count > 10 ? (this.dgvCreateTextFile.Rows[i].Cells[10].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[10].EditedFormattedValue.ToString()) : "";
re.Column9 = this.dgvCreateTextFile.Columns.Count > 11 ? (this.dgvCreateTextFile.Rows[i].Cells[11].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[11].EditedFormattedValue.ToString()) : "";
re.Column10 = this.dgvCreateTextFile.Columns.Count > 12 ? (this.dgvCreateTextFile.Rows[i].Cells[12].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[12].EditedFormattedValue.ToString()) : "";
re.Column11 = this.dgvCreateTextFile.Columns.Count > 13 ? (this.dgvCreateTextFile.Rows[i].Cells[13].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[13].EditedFormattedValue.ToString()) : "";
re.Column12 = this.dgvCreateTextFile.Columns.Count > 14 ? (this.dgvCreateTextFile.Rows[i].Cells[14].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[14].EditedFormattedValue.ToString()) : "";
re.Column13 = this.dgvCreateTextFile.Columns.Count > 15 ? (this.dgvCreateTextFile.Rows[i].Cells[15].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[15].EditedFormattedValue.ToString()) : "";
re.Column14 = this.dgvCreateTextFile.Columns.Count > 16 ? (this.dgvCreateTextFile.Rows[i].Cells[16].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[16].EditedFormattedValue.ToString()) : "";
re.Column15 = this.dgvCreateTextFile.Columns.Count > 17 ? (this.dgvCreateTextFile.Rows[i].Cells[17].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[17].EditedFormattedValue.ToString()) : "";
re.Column16 = this.dgvCreateTextFile.Columns.Count > 18 ? (this.dgvCreateTextFile.Rows[i].Cells[18].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[18].EditedFormattedValue.ToString()) : "";
re.Column17 = this.dgvCreateTextFile.Columns.Count > 19 ? (this.dgvCreateTextFile.Rows[i].Cells[19].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[19].EditedFormattedValue.ToString()) : "";
re.Column18 = this.dgvCreateTextFile.Columns.Count > 20 ? (this.dgvCreateTextFile.Rows[i].Cells[20].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[20].EditedFormattedValue.ToString()) : "";
re.Column19 = this.dgvCreateTextFile.Columns.Count > 21 ? (this.dgvCreateTextFile.Rows[i].Cells[21].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[21].EditedFormattedValue.ToString()) : "";
re.Column20 = this.dgvCreateTextFile.Columns.Count > 22 ? (this.dgvCreateTextFile.Rows[i].Cells[22].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[22].EditedFormattedValue.ToString()) : "";
re.Column21 = this.dgvCreateTextFile.Columns.Count > 23 ? (this.dgvCreateTextFile.Rows[i].Cells[23].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[23].EditedFormattedValue.ToString()) : "";
re.Column22 = this.dgvCreateTextFile.Columns.Count > 24 ? (this.dgvCreateTextFile.Rows[i].Cells[24].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[24].EditedFormattedValue.ToString()) : "";
re.Column23 = this.dgvCreateTextFile.Columns.Count > 25 ? (this.dgvCreateTextFile.Rows[i].Cells[25].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[25].EditedFormattedValue.ToString()) : "";
re.Column24 = this.dgvCreateTextFile.Columns.Count > 26 ? (this.dgvCreateTextFile.Rows[i].Cells[26].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[26].EditedFormattedValue.ToString()) : "";
re.Column25 = this.dgvCreateTextFile.Columns.Count > 27 ? (this.dgvCreateTextFile.Rows[i].Cells[27].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[27].EditedFormattedValue.ToString()) : "";
re.Column26 = this.dgvCreateTextFile.Columns.Count > 28 ? (this.dgvCreateTextFile.Rows[i].Cells[28].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[28].EditedFormattedValue.ToString()) : "";
re.Column27 = this.dgvCreateTextFile.Columns.Count > 29 ? (this.dgvCreateTextFile.Rows[i].Cells[29].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[29].EditedFormattedValue.ToString()) : "";
re.Column28 = this.dgvCreateTextFile.Columns.Count > 30 ? (this.dgvCreateTextFile.Rows[i].Cells[30].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[30].EditedFormattedValue.ToString()) : "";
re.Column29 = this.dgvCreateTextFile.Columns.Count > 31 ? (this.dgvCreateTextFile.Rows[i].Cells[31].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[31].EditedFormattedValue.ToString()) : "";
re.Column30 = this.dgvCreateTextFile.Columns.Count > 32 ? (this.dgvCreateTextFile.Rows[i].Cells[32].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[32].EditedFormattedValue.ToString()) : "";
re.Column31 = this.dgvCreateTextFile.Columns.Count > 33 ? (this.dgvCreateTextFile.Rows[i].Cells[33].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[33].EditedFormattedValue.ToString()) : "";
re.Column32 = this.dgvCreateTextFile.Columns.Count > 34 ? (this.dgvCreateTextFile.Rows[i].Cells[34].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[34].EditedFormattedValue.ToString()) : "";
re.Column33 = this.dgvCreateTextFile.Columns.Count > 35 ? (this.dgvCreateTextFile.Rows[i].Cells[35].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[35].EditedFormattedValue.ToString()) : "";
re.Column34 = this.dgvCreateTextFile.Columns.Count > 36 ? (this.dgvCreateTextFile.Rows[i].Cells[36].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[36].EditedFormattedValue.ToString()) : "";
re.Column35 = this.dgvCreateTextFile.Columns.Count > 37 ? (this.dgvCreateTextFile.Rows[i].Cells[37].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[37].EditedFormattedValue.ToString()) : "";
re.Column36 = this.dgvCreateTextFile.Columns.Count > 38 ? (this.dgvCreateTextFile.Rows[i].Cells[38].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[38].EditedFormattedValue.ToString()) : "";
re.Column37 = this.dgvCreateTextFile.Columns.Count > 39 ? (this.dgvCreateTextFile.Rows[i].Cells[39].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[39].EditedFormattedValue.ToString()) : "";
re.Column38 = this.dgvCreateTextFile.Columns.Count > 40 ? (this.dgvCreateTextFile.Rows[i].Cells[40].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[40].EditedFormattedValue.ToString()) : "";
re.Column39 = this.dgvCreateTextFile.Columns.Count > 41 ? (this.dgvCreateTextFile.Rows[i].Cells[41].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[41].EditedFormattedValue.ToString()) : "";
re.Column40 = this.dgvCreateTextFile.Columns.Count > 42 ? (this.dgvCreateTextFile.Rows[i].Cells[42].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[42].EditedFormattedValue.ToString()) : "";
re.Column41 = this.dgvCreateTextFile.Columns.Count > 43 ? (this.dgvCreateTextFile.Rows[i].Cells[43].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[43].EditedFormattedValue.ToString()) : "";
re.Column42 = this.dgvCreateTextFile.Columns.Count > 44 ? (this.dgvCreateTextFile.Rows[i].Cells[44].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[44].EditedFormattedValue.ToString()) : "";
re.Column43 = this.dgvCreateTextFile.Columns.Count > 45 ? (this.dgvCreateTextFile.Rows[i].Cells[45].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[45].EditedFormattedValue.ToString()) : "";
re.Column44 = this.dgvCreateTextFile.Columns.Count > 46 ? (this.dgvCreateTextFile.Rows[i].Cells[46].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[46].EditedFormattedValue.ToString()) : "";
re.Column45 = this.dgvCreateTextFile.Columns.Count > 47 ? (this.dgvCreateTextFile.Rows[i].Cells[47].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[47].EditedFormattedValue.ToString()) : "";
re.Column46 = this.dgvCreateTextFile.Columns.Count > 48 ? (this.dgvCreateTextFile.Rows[i].Cells[48].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[48].EditedFormattedValue.ToString()) : "";
re.Column47 = this.dgvCreateTextFile.Columns.Count > 49 ? (this.dgvCreateTextFile.Rows[i].Cells[49].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[49].EditedFormattedValue.ToString()) : "";
re.Column48 = this.dgvCreateTextFile.Columns.Count > 50 ? (this.dgvCreateTextFile.Rows[i].Cells[50].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[50].EditedFormattedValue.ToString()) : "";
re.Column49 = this.dgvCreateTextFile.Columns.Count > 51 ? (this.dgvCreateTextFile.Rows[i].Cells[51].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[51].EditedFormattedValue.ToString()) : "";
re.Column50 = this.dgvCreateTextFile.Columns.Count > 52 ? (this.dgvCreateTextFile.Rows[i].Cells[52].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[52].EditedFormattedValue.ToString()) : "";
if (string.IsNullOrEmpty(re.LineIndicator))
this.dgvCreateTextFile.Rows[i].Cells[1].ErrorText = "Not null.";
else
this.dgvCreateTextFile.Rows[i].Cells[1].ErrorText = string.Empty;
cmd2.Parameters.Add(new SqlParameter("@LineIndicator", re.LineIndicator));
cmd2.Parameters.Add(new SqlParameter("@Column1", re.Column1));
cmd2.Parameters.Add(new SqlParameter("@Column2", re.Column2));
cmd2.Parameters.Add(new SqlParameter("@Column3", re.Column3));
cmd2.Parameters.Add(new SqlParameter("@Column4", re.Column4));
cmd2.Parameters.Add(new SqlParameter("@Column5", re.Column5));
cmd2.Parameters.Add(new SqlParameter("@Column6", re.Column6));
cmd2.Parameters.Add(new SqlParameter("@Column7", re.Column7));
cmd2.Parameters.Add(new SqlParameter("@Column8", re.Column8));
cmd2.Parameters.Add(new SqlParameter("@Column9", re.Column9));
cmd2.Parameters.Add(new SqlParameter("@Column10", re.Column10));
cmd2.Parameters.Add(new SqlParameter("@Column11", re.Column11));
cmd2.Parameters.Add(new SqlParameter("@Column12", re.Column12));
cmd2.Parameters.Add(new SqlParameter("@Column13", re.Column13));
cmd2.Parameters.Add(new SqlParameter("@Column14", re.Column14));
cmd2.Parameters.Add(new SqlParameter("@Column15", re.Column15));
cmd2.Parameters.Add(new SqlParameter("@Column16", re.Column16));
cmd2.Parameters.Add(new SqlParameter("@Column17", re.Column17));
cmd2.Parameters.Add(new SqlParameter("@Column18", re.Column18));
cmd2.Parameters.Add(new SqlParameter("@Column19", re.Column19));
cmd2.Parameters.Add(new SqlParameter("@Column20", re.Column20));
cmd2.Parameters.Add(new SqlParameter("@Column21", re.Column21));
cmd2.Parameters.Add(new SqlParameter("@Column22", re.Column22));
cmd2.Parameters.Add(new SqlParameter("@Column23", re.Column23));
cmd2.Parameters.Add(new SqlParameter("@Column24", re.Column24));
cmd2.Parameters.Add(new SqlParameter("@Column25", re.Column25));
cmd2.Parameters.Add(new SqlParameter("@Column26", re.Column26));
cmd2.Parameters.Add(new SqlParameter("@Column27", re.Column27));
cmd2.Parameters.Add(new SqlParameter("@Column28", re.Column28));
cmd2.Parameters.Add(new SqlParameter("@Column29", re.Column29));
cmd2.Parameters.Add(new SqlParameter("@Column30", re.Column30));
cmd2.Parameters.Add(new SqlParameter("@Column31", re.Column31));
cmd2.Parameters.Add(new SqlParameter("@Column32", re.Column32));
cmd2.Parameters.Add(new SqlParameter("@Column33", re.Column33));
cmd2.Parameters.Add(new SqlParameter("@Column34", re.Column34));
cmd2.Parameters.Add(new SqlParameter("@Column35", re.Column35));
cmd2.Parameters.Add(new SqlParameter("@Column36", re.Column36));
cmd2.Parameters.Add(new SqlParameter("@Column37", re.Column37));
cmd2.Parameters.Add(new SqlParameter("@Column38", re.Column38));
cmd2.Parameters.Add(new SqlParameter("@Column39", re.Column39));
cmd2.Parameters.Add(new SqlParameter("@Column40", re.Column40));
cmd2.Parameters.Add(new SqlParameter("@Column41", re.Column41));
cmd2.Parameters.Add(new SqlParameter("@Column42", re.Column42));
cmd2.Parameters.Add(new SqlParameter("@Column43", re.Column43));
cmd2.Parameters.Add(new SqlParameter("@Column44", re.Column44));
cmd2.Parameters.Add(new SqlParameter("@Column45", re.Column45));
cmd2.Parameters.Add(new SqlParameter("@Column46", re.Column46));
cmd2.Parameters.Add(new SqlParameter("@Column47", re.Column47));
cmd2.Parameters.Add(new SqlParameter("@Column48", re.Column48));
cmd2.Parameters.Add(new SqlParameter("@Column49", re.Column49));
cmd2.Parameters.Add(new SqlParameter("@Column50", re.Column50));
cmd2.Parameters.Add(new SqlParameter("@HeaderTextes", HeaderText));
cmd2.Parameters.Add(new SqlParameter("@StartinginCell", re.StartinginCell));
cmd2.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd2.Parameters.Add(new SqlParameter("@IncludeHeaderRow", re.IncludeHeaderRow == "True" ? true : false));
cmd2.Parameters.Add(new SqlParameter("@SavePath", ""));
cmd2.Parameters.Add(new SqlParameter("@SaveName", ""));
cmd2.Parameters.Add(new SqlParameter("@ReferenceNumber", re.ReferenceNumber));
cmd2.Parameters.Add(new SqlParameter("@SunComponent", cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(","))));
cmd2.Parameters.Add(new SqlParameter("@SunMethod", cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1)));
cmd2.Parameters.Add(new SqlParameter("@HeaderValue", HeaderValue));
cmd2.Parameters.Add(new SqlParameter("@ProcessName", ""));
re.SunComponent = cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(","));
re.SunMethod = cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1);
rdr = cmd2.ExecuteReader();
rdr.Close();
cmd2.Parameters.Clear();
if (!string.IsNullOrEmpty(re.LineIndicator) && !string.IsNullOrEmpty(re.StartinginCell))
{
finallistCTF.Add(re);
LineIndicatorList.Add(re.LineIndicator);
startInCellList.Add(re.StartinginCell);
}
}
this.Invalidate();
//if (LineIndicatorList.Count == 0)
//{
// throw new Exception("No data found for specified Line Indicator(s)!");
//}
if (sender == null)
AddCreateTextFileEntityListIntoFinalList(finallistCTF, LineIndicatorList, startInCellList, ws, ref finallistCTF);
}
catch (Exception ex)
{
throw new Exception(ex.Message + " - Data Error in XML/Text File tab, Output settings !");
}
finally
{
if (conn != null)
{
conn.Close();
}
if (rdr != null)
{
rdr.Close();
}
}
}
}
        /// <summary>
        /// Persists the text-file output grid for the current template: deletes
        /// the previously saved rows for the selected process name
        /// (rsTemplateXMLTextFileDGV_Del), re-inserts every non-empty grid row
        /// (rsTemplateXMLTextFileDGV_Ins), and rebuilds finallistCTF.
        /// </summary>
        /// <param name="sender">Pass null to also run
        /// AddCreateTextFileEntityListIntoFinalList after saving.</param>
        /// <param name="ws">Worksheet forwarded when building the final output list.</param>
public void SaveCTF(object sender, Microsoft.Office.Interop.Excel.Worksheet ws)
{
string HeaderText = string.Empty;
string HeaderValue = string.Empty;
List<string> LineIndicatorList = new List<string>();
List<string> startInCellList = new List<string>();
for (int k = 0; k < dgvCreateTextFile.Columns.Count; k++)
{
HeaderText += dgvCreateTextFile.Columns[k].HeaderText + ",";
HeaderValue += dgvCreateTextFile.Columns[k].Name + ",";
}
if (!string.IsNullOrEmpty(SessionInfo.UserInfo.File_ftid))
{
SqlConnection conn = null;
SqlDataReader rdr = null;
try
{
conn = new
SqlConnection(ConfigurationManager.ConnectionStrings["conRsTool"].ConnectionString.ToString());
conn.Open();
SqlCommand cmd = new SqlCommand("rsTemplateXMLTextFileDGV_Del", conn);
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd.Parameters.Add(new SqlParameter("@ProcessName", cbItems.Text));
rdr = cmd.ExecuteReader();
rdr.Close();
finallistCTF.Clear();
SqlCommand cmd2 = new SqlCommand("rsTemplateXMLTextFileDGV_Ins", conn);
cmd2.CommandType = CommandType.StoredProcedure;
for (int i = 0; i < this.dgvCreateTextFile.Rows.Count; i++)
{
string isEmptyStr = string.Empty;
for (int k = 0; k < dgvCreateTextFile.Rows[i].Cells.Count; k++)
{
isEmptyStr += dgvCreateTextFile.Rows[i].Cells[k].EditedFormattedValue;
}
if (string.IsNullOrEmpty(isEmptyStr)) continue;
RowCreateTextFile re = new RowCreateTextFile();
re.ReferenceNumber = this.dgvCreateTextFile.Rows[i].Cells[0].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[0].Value.ToString().Replace(" ", "");
re.LineIndicator = this.dgvCreateTextFile.Rows[i].Cells[1].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[1].Value.ToString();
re.StartinginCell = this.dgvCreateTextFile.Rows[i].Cells[2].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[2].Value.ToString();
re.SavePath = this.dgvCreateTextFile.Rows[i].Cells[3].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[3].Value.ToString();
re.SaveName = this.dgvCreateTextFile.Rows[i].Cells[4].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[4].Value.ToString();
re.IncludeHeaderRow = this.dgvCreateTextFile.Rows[i].Cells[5].Value == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[5].Value.ToString();
re.Column1 = this.dgvCreateTextFile.Columns.Count > 6 ? (this.dgvCreateTextFile.Rows[i].Cells[6].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[6].EditedFormattedValue.ToString()) : "";
re.Column2 = this.dgvCreateTextFile.Columns.Count > 7 ? (this.dgvCreateTextFile.Rows[i].Cells[7].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[7].EditedFormattedValue.ToString()) : "";
re.Column3 = this.dgvCreateTextFile.Columns.Count > 8 ? (this.dgvCreateTextFile.Rows[i].Cells[8].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[8].EditedFormattedValue.ToString()) : "";
re.Column4 = this.dgvCreateTextFile.Columns.Count > 9 ? (this.dgvCreateTextFile.Rows[i].Cells[9].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[9].EditedFormattedValue.ToString()) : "";
re.Column5 = this.dgvCreateTextFile.Columns.Count > 10 ? (this.dgvCreateTextFile.Rows[i].Cells[10].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[10].EditedFormattedValue.ToString()) : "";
re.Column6 = this.dgvCreateTextFile.Columns.Count > 11 ? (this.dgvCreateTextFile.Rows[i].Cells[11].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[11].EditedFormattedValue.ToString()) : "";
re.Column7 = this.dgvCreateTextFile.Columns.Count > 12 ? (this.dgvCreateTextFile.Rows[i].Cells[12].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[12].EditedFormattedValue.ToString()) : "";
re.Column8 = this.dgvCreateTextFile.Columns.Count > 13 ? (this.dgvCreateTextFile.Rows[i].Cells[13].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[13].EditedFormattedValue.ToString()) : "";
re.Column9 = this.dgvCreateTextFile.Columns.Count > 14 ? (this.dgvCreateTextFile.Rows[i].Cells[14].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[14].EditedFormattedValue.ToString()) : "";
re.Column10 = this.dgvCreateTextFile.Columns.Count > 15 ? (this.dgvCreateTextFile.Rows[i].Cells[15].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[15].EditedFormattedValue.ToString()) : "";
re.Column11 = this.dgvCreateTextFile.Columns.Count > 16 ? (this.dgvCreateTextFile.Rows[i].Cells[16].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[16].EditedFormattedValue.ToString()) : "";
re.Column12 = this.dgvCreateTextFile.Columns.Count > 17 ? (this.dgvCreateTextFile.Rows[i].Cells[17].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[17].EditedFormattedValue.ToString()) : "";
re.Column13 = this.dgvCreateTextFile.Columns.Count > 18 ? (this.dgvCreateTextFile.Rows[i].Cells[18].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[18].EditedFormattedValue.ToString()) : "";
re.Column14 = this.dgvCreateTextFile.Columns.Count > 19 ? (this.dgvCreateTextFile.Rows[i].Cells[19].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[19].EditedFormattedValue.ToString()) : "";
re.Column15 = this.dgvCreateTextFile.Columns.Count > 20 ? (this.dgvCreateTextFile.Rows[i].Cells[20].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[20].EditedFormattedValue.ToString()) : "";
re.Column16 = this.dgvCreateTextFile.Columns.Count > 21 ? (this.dgvCreateTextFile.Rows[i].Cells[21].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[21].EditedFormattedValue.ToString()) : "";
re.Column17 = this.dgvCreateTextFile.Columns.Count > 22 ? (this.dgvCreateTextFile.Rows[i].Cells[22].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[22].EditedFormattedValue.ToString()) : "";
re.Column18 = this.dgvCreateTextFile.Columns.Count > 23 ? (this.dgvCreateTextFile.Rows[i].Cells[23].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[23].EditedFormattedValue.ToString()) : "";
re.Column19 = this.dgvCreateTextFile.Columns.Count > 24 ? (this.dgvCreateTextFile.Rows[i].Cells[24].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[24].EditedFormattedValue.ToString()) : "";
re.Column20 = this.dgvCreateTextFile.Columns.Count > 25 ? (this.dgvCreateTextFile.Rows[i].Cells[25].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[25].EditedFormattedValue.ToString()) : "";
re.Column21 = this.dgvCreateTextFile.Columns.Count > 26 ? (this.dgvCreateTextFile.Rows[i].Cells[26].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[26].EditedFormattedValue.ToString()) : "";
re.Column22 = this.dgvCreateTextFile.Columns.Count > 27 ? (this.dgvCreateTextFile.Rows[i].Cells[27].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[27].EditedFormattedValue.ToString()) : "";
re.Column23 = this.dgvCreateTextFile.Columns.Count > 28 ? (this.dgvCreateTextFile.Rows[i].Cells[28].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[28].EditedFormattedValue.ToString()) : "";
re.Column24 = this.dgvCreateTextFile.Columns.Count > 29 ? (this.dgvCreateTextFile.Rows[i].Cells[29].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[29].EditedFormattedValue.ToString()) : "";
re.Column25 = this.dgvCreateTextFile.Columns.Count > 30 ? (this.dgvCreateTextFile.Rows[i].Cells[30].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[30].EditedFormattedValue.ToString()) : "";
re.Column26 = this.dgvCreateTextFile.Columns.Count > 31 ? (this.dgvCreateTextFile.Rows[i].Cells[31].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[31].EditedFormattedValue.ToString()) : "";
re.Column27 = this.dgvCreateTextFile.Columns.Count > 32 ? (this.dgvCreateTextFile.Rows[i].Cells[32].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[32].EditedFormattedValue.ToString()) : "";
re.Column28 = this.dgvCreateTextFile.Columns.Count > 33 ? (this.dgvCreateTextFile.Rows[i].Cells[33].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[33].EditedFormattedValue.ToString()) : "";
re.Column29 = this.dgvCreateTextFile.Columns.Count > 34 ? (this.dgvCreateTextFile.Rows[i].Cells[34].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[34].EditedFormattedValue.ToString()) : "";
re.Column30 = this.dgvCreateTextFile.Columns.Count > 35 ? (this.dgvCreateTextFile.Rows[i].Cells[35].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[35].EditedFormattedValue.ToString()) : "";
re.Column31 = this.dgvCreateTextFile.Columns.Count > 36 ? (this.dgvCreateTextFile.Rows[i].Cells[36].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[36].EditedFormattedValue.ToString()) : "";
re.Column32 = this.dgvCreateTextFile.Columns.Count > 37 ? (this.dgvCreateTextFile.Rows[i].Cells[37].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[37].EditedFormattedValue.ToString()) : "";
re.Column33 = this.dgvCreateTextFile.Columns.Count > 38 ? (this.dgvCreateTextFile.Rows[i].Cells[38].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[38].EditedFormattedValue.ToString()) : "";
re.Column34 = this.dgvCreateTextFile.Columns.Count > 39 ? (this.dgvCreateTextFile.Rows[i].Cells[39].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[39].EditedFormattedValue.ToString()) : "";
re.Column35 = this.dgvCreateTextFile.Columns.Count > 40 ? (this.dgvCreateTextFile.Rows[i].Cells[40].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[40].EditedFormattedValue.ToString()) : "";
re.Column36 = this.dgvCreateTextFile.Columns.Count > 41 ? (this.dgvCreateTextFile.Rows[i].Cells[41].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[41].EditedFormattedValue.ToString()) : "";
re.Column37 = this.dgvCreateTextFile.Columns.Count > 42 ? (this.dgvCreateTextFile.Rows[i].Cells[42].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[42].EditedFormattedValue.ToString()) : "";
re.Column38 = this.dgvCreateTextFile.Columns.Count > 43 ? (this.dgvCreateTextFile.Rows[i].Cells[43].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[43].EditedFormattedValue.ToString()) : "";
re.Column39 = this.dgvCreateTextFile.Columns.Count > 44 ? (this.dgvCreateTextFile.Rows[i].Cells[44].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[44].EditedFormattedValue.ToString()) : "";
re.Column40 = this.dgvCreateTextFile.Columns.Count > 45 ? (this.dgvCreateTextFile.Rows[i].Cells[45].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[45].EditedFormattedValue.ToString()) : "";
re.Column41 = this.dgvCreateTextFile.Columns.Count > 46 ? (this.dgvCreateTextFile.Rows[i].Cells[46].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[46].EditedFormattedValue.ToString()) : "";
re.Column42 = this.dgvCreateTextFile.Columns.Count > 47 ? (this.dgvCreateTextFile.Rows[i].Cells[47].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[47].EditedFormattedValue.ToString()) : "";
re.Column43 = this.dgvCreateTextFile.Columns.Count > 48 ? (this.dgvCreateTextFile.Rows[i].Cells[48].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[48].EditedFormattedValue.ToString()) : "";
re.Column44 = this.dgvCreateTextFile.Columns.Count > 49 ? (this.dgvCreateTextFile.Rows[i].Cells[49].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[49].EditedFormattedValue.ToString()) : "";
re.Column45 = this.dgvCreateTextFile.Columns.Count > 50 ? (this.dgvCreateTextFile.Rows[i].Cells[50].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[50].EditedFormattedValue.ToString()) : "";
re.Column46 = this.dgvCreateTextFile.Columns.Count > 51 ? (this.dgvCreateTextFile.Rows[i].Cells[51].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[51].EditedFormattedValue.ToString()) : "";
re.Column47 = this.dgvCreateTextFile.Columns.Count > 52 ? (this.dgvCreateTextFile.Rows[i].Cells[52].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[52].EditedFormattedValue.ToString()) : "";
re.Column48 = this.dgvCreateTextFile.Columns.Count > 53 ? (this.dgvCreateTextFile.Rows[i].Cells[53].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[53].EditedFormattedValue.ToString()) : "";
re.Column49 = this.dgvCreateTextFile.Columns.Count > 54 ? (this.dgvCreateTextFile.Rows[i].Cells[54].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[54].EditedFormattedValue.ToString()) : "";
re.Column50 = this.dgvCreateTextFile.Columns.Count > 55 ? (this.dgvCreateTextFile.Rows[i].Cells[55].EditedFormattedValue == null ? "" : this.dgvCreateTextFile.Rows[i].Cells[55].EditedFormattedValue.ToString()) : "";
if (string.IsNullOrEmpty(re.LineIndicator))
this.dgvCreateTextFile.Rows[i].Cells[1].ErrorText = "Not null.";
else
this.dgvCreateTextFile.Rows[i].Cells[1].ErrorText = string.Empty;
cmd2.Parameters.Add(new SqlParameter("@LineIndicator", re.LineIndicator));
cmd2.Parameters.Add(new SqlParameter("@Column1", re.Column1));
cmd2.Parameters.Add(new SqlParameter("@Column2", re.Column2));
cmd2.Parameters.Add(new SqlParameter("@Column3", re.Column3));
cmd2.Parameters.Add(new SqlParameter("@Column4", re.Column4));
cmd2.Parameters.Add(new SqlParameter("@Column5", re.Column5));
cmd2.Parameters.Add(new SqlParameter("@Column6", re.Column6));
cmd2.Parameters.Add(new SqlParameter("@Column7", re.Column7));
cmd2.Parameters.Add(new SqlParameter("@Column8", re.Column8));
cmd2.Parameters.Add(new SqlParameter("@Column9", re.Column9));
cmd2.Parameters.Add(new SqlParameter("@Column10", re.Column10));
cmd2.Parameters.Add(new SqlParameter("@Column11", re.Column11));
cmd2.Parameters.Add(new SqlParameter("@Column12", re.Column12));
cmd2.Parameters.Add(new SqlParameter("@Column13", re.Column13));
cmd2.Parameters.Add(new SqlParameter("@Column14", re.Column14));
cmd2.Parameters.Add(new SqlParameter("@Column15", re.Column15));
cmd2.Parameters.Add(new SqlParameter("@Column16", re.Column16));
cmd2.Parameters.Add(new SqlParameter("@Column17", re.Column17));
cmd2.Parameters.Add(new SqlParameter("@Column18", re.Column18));
cmd2.Parameters.Add(new SqlParameter("@Column19", re.Column19));
cmd2.Parameters.Add(new SqlParameter("@Column20", re.Column20));
cmd2.Parameters.Add(new SqlParameter("@Column21", re.Column21));
cmd2.Parameters.Add(new SqlParameter("@Column22", re.Column22));
cmd2.Parameters.Add(new SqlParameter("@Column23", re.Column23));
cmd2.Parameters.Add(new SqlParameter("@Column24", re.Column24));
cmd2.Parameters.Add(new SqlParameter("@Column25", re.Column25));
cmd2.Parameters.Add(new SqlParameter("@Column26", re.Column26));
cmd2.Parameters.Add(new SqlParameter("@Column27", re.Column27));
cmd2.Parameters.Add(new SqlParameter("@Column28", re.Column28));
cmd2.Parameters.Add(new SqlParameter("@Column29", re.Column29));
cmd2.Parameters.Add(new SqlParameter("@Column30", re.Column30));
cmd2.Parameters.Add(new SqlParameter("@Column31", re.Column31));
cmd2.Parameters.Add(new SqlParameter("@Column32", re.Column32));
cmd2.Parameters.Add(new SqlParameter("@Column33", re.Column33));
cmd2.Parameters.Add(new SqlParameter("@Column34", re.Column34));
cmd2.Parameters.Add(new SqlParameter("@Column35", re.Column35));
cmd2.Parameters.Add(new SqlParameter("@Column36", re.Column36));
cmd2.Parameters.Add(new SqlParameter("@Column37", re.Column37));
cmd2.Parameters.Add(new SqlParameter("@Column38", re.Column38));
cmd2.Parameters.Add(new SqlParameter("@Column39", re.Column39));
cmd2.Parameters.Add(new SqlParameter("@Column40", re.Column40));
cmd2.Parameters.Add(new SqlParameter("@Column41", re.Column41));
cmd2.Parameters.Add(new SqlParameter("@Column42", re.Column42));
cmd2.Parameters.Add(new SqlParameter("@Column43", re.Column43));
cmd2.Parameters.Add(new SqlParameter("@Column44", re.Column44));
cmd2.Parameters.Add(new SqlParameter("@Column45", re.Column45));
cmd2.Parameters.Add(new SqlParameter("@Column46", re.Column46));
cmd2.Parameters.Add(new SqlParameter("@Column47", re.Column47));
cmd2.Parameters.Add(new SqlParameter("@Column48", re.Column48));
cmd2.Parameters.Add(new SqlParameter("@Column49", re.Column49));
cmd2.Parameters.Add(new SqlParameter("@Column50", re.Column50));
cmd2.Parameters.Add(new SqlParameter("@HeaderTextes", HeaderText));
cmd2.Parameters.Add(new SqlParameter("@StartinginCell", re.StartinginCell));
cmd2.Parameters.Add(new SqlParameter("@TemplateID", SessionInfo.UserInfo.File_ftid));
cmd2.Parameters.Add(new SqlParameter("@IncludeHeaderRow", re.IncludeHeaderRow == "True" ? true : false));
cmd2.Parameters.Add(new SqlParameter("@SavePath", re.SavePath));
cmd2.Parameters.Add(new SqlParameter("@SaveName", re.SaveName));
cmd2.Parameters.Add(new SqlParameter("@ReferenceNumber", re.ReferenceNumber));
cmd2.Parameters.Add(new SqlParameter("@SunComponent", ""));
cmd2.Parameters.Add(new SqlParameter("@SunMethod", ""));
cmd2.Parameters.Add(new SqlParameter("@HeaderValue", HeaderValue));
cmd2.Parameters.Add(new SqlParameter("@ProcessName", cbItems.Text));
rdr = cmd2.ExecuteReader();
rdr.Close();
cmd2.Parameters.Clear();
if (!string.IsNullOrEmpty(re.LineIndicator) && !string.IsNullOrEmpty(re.StartinginCell))
{
finallistCTF.Add(re);
LineIndicatorList.Add(re.LineIndicator);
startInCellList.Add(re.StartinginCell);
}
}
this.Invalidate();
//if (LineIndicatorList.Count == 0)
//{
// throw new Exception("No data found for specified Line Indicator(s)!");
//}
if (sender == null)
AddCreateTextFileEntityListIntoFinalList(finallistCTF, LineIndicatorList, startInCellList, ws, ref finallistCTF);
}
catch (Exception ex)
{
throw new Exception(ex.Message + " - Data Error in XML/Text File tab, Output settings !");
}
finally
{
if (conn != null)
{
conn.Close();
}
if (rdr != null)
{
rdr.Close();
}
}
}
}
        /// <summary>
        /// Runs the "Test" action for the journal (transaction) update screen:
        /// measures the used range of the active worksheet, shows the
        /// <c>TransUpdPostFrm</c> progress panel, and invokes <c>SaveTransUpd</c>
        /// over the sheet data.  Errors collected in the form's text box are logged.
        /// </summary>
        /// <param name="sender">The button that raised the event.</param>
        /// <param name="e">Standard event arguments (unused).</param>
        private void btnTestTransUpd_Click(object sender, EventArgs e)
        {
            // Recreate the progress form from scratch so state from a previous run
            // cannot leak into this one.
            if (Ribbon2.tupf != null)
                Ribbon2.tupf.Dispose();
            DateTime starttime = DateTime.Now;
            Ribbon2.tupf = new TransUpdPostFrm();
            // Show only the progress panel (panel3) while the run is in flight;
            // the normal panels are restored in the finally block below.
            Ribbon2.tupf.panel3.Visible = true;
            Ribbon2.tupf.panel1.Visible = false;
            Ribbon2.tupf.panel2.Visible = false;
            Ribbon2.tupf.panel3.Dock = DockStyle.Fill;
            Ribbon2.tupf.ControlBox = false;
            OutputContainer.isTransUpdFlag = true;
            try
            {
                Globals.ThisAddIn.Application.DisplayAlerts = false;
                Ribbon2.wsRrigin = (Microsoft.Office.Interop.Excel.Worksheet)Globals.ThisAddIn.Application.ActiveWorkbook.ActiveSheet;
                // Find the last used column by searching backwards from A1 across columns.
                var lastColumn = Ribbon2.wsRrigin.Cells.Find("*", Ribbon2.wsRrigin.Cells[1, 1], Microsoft.Office.Interop.Excel.XlFindLookIn.xlFormulas, Microsoft.Office.Interop.Excel.XlLookAt.xlPart, Microsoft.Office.Interop.Excel.XlSearchOrder.xlByColumns, Microsoft.Office.Interop.Excel.XlSearchDirection.xlPrevious, false, Type.Missing, Type.Missing);
                Ribbon2.LastColumnName = Finance_Tools.RemoveNumber(lastColumn.Address).Replace("$", "");
                Finance_Tools.MaxColumnCount = lastColumn.Column;
                // Same search by rows to find the last used row.
                var lastrow = Ribbon2.wsRrigin.Cells.Find("*", Ribbon2.wsRrigin.Cells[1, 1], Microsoft.Office.Interop.Excel.XlFindLookIn.xlFormulas, Microsoft.Office.Interop.Excel.XlLookAt.xlPart, Microsoft.Office.Interop.Excel.XlSearchOrder.xlByRows, Microsoft.Office.Interop.Excel.XlSearchDirection.xlPrevious, false, Type.Missing, Type.Missing);
                Ribbon2.LastRowNumber = Finance_Tools.RemoveNotNumber(lastrow.Address);
                Globals.ThisAddIn.Application.ScreenUpdating = false;
                Ribbon2.tupf.Show();
                TransUpdPostFrm.richTextBox1.Text += "Error List :\r\n";
                SaveTransUpd(null, Ribbon2.wsRrigin);
                SetSession();
                Globals.ThisAddIn.Application.ScreenUpdating = true;
                Globals.ThisAddIn.Application.DisplayAlerts = true;
                // Text beyond the threshold (21 chars, presumably the "Error List :"
                // prologue plus slack -- TODO confirm) means errors were appended; log them.
                if (TransUpdPostFrm.richTextBox1.Text.Length > 21)
                {
                    LogHelper.WriteLog(typeof(Ribbon2), TransUpdPostFrm.richTextBox1.Text + " - Journal update processing error , Template:" + SessionInfo.UserInfo.FileName);
                }
            }
            catch (Exception ex)
            {
                // A busy clipboard is treated as a retryable condition and gets a
                // friendlier message than other exceptions.
                if (ex.ToString().Contains("Clipboard"))
                {
                    MessageBox.Show("Clipboard not ready, please try again.", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                else
                {
                    MessageBox.Show(ex.ToString(), "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                LogHelper.WriteLog(typeof(OutputContainer), ex.Message + "Journal update error");
                Ribbon2.tupf.Dispose();
                Globals.ThisAddIn.Application.ScreenUpdating = true;
                Clipboard.SetText("\r\n");
            }
            finally
            {
                // Restore the form's normal layout, report elapsed time in the Excel
                // status bar, and clear the "in progress" flag.
                Ribbon2.tupf.Focus();
                Ribbon2.tupf.bddata();
                Ribbon2.tupf.panel3.Visible = false;
                Ribbon2.tupf.panel1.Visible = true;
                Ribbon2.tupf.panel2.Visible = true;
                Ribbon2.tupf.ControlBox = true;
                DateTime stoptime = DateTime.Now;
                string costtime = Finance_Tools.DateDiff(starttime, stoptime);
                Globals.ThisAddIn.Application.StatusBar = "latest journal update process costs " + costtime;
                GC.Collect();
                OutputContainer.isTransUpdFlag = false;
            }
        }
        /// <summary>
        /// Runs the "Test" action for the Create XML/Text File screen: requires a
        /// selected grid row (its first cell supplies the reference number), measures
        /// the active worksheet, then runs either <c>SaveXML</c> (XML mode) or
        /// <c>SaveCTF</c> followed by <see cref="GenTextFile"/> (text mode).
        /// </summary>
        /// <param name="sender">The button that raised the event; also forwarded to
        /// <see cref="GenTextFile"/> to control how success is reported.</param>
        /// <param name="e">Standard event arguments (unused).</param>
        private void btnTestCTF_Click(object sender, EventArgs e)
        {
            // A grid row must be selected: its cells carry the reference number,
            // save path, file name and header-row flag used below.
            if (dgvCreateTextFile.SelectedRows.Count == 0)
            {
                MessageBox.Show("Please click the row header number ( before Ref column ) to choose a certain Reference number and enjoy your test ! ", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                return;
            }
            else
            {
                if (dgvCreateTextFile.SelectedRows[0].Cells[0].Value != null)
                    SessionInfo.UserInfo.CurrentRef = dgvCreateTextFile.SelectedRows[0].Cells[0].Value.ToString().Replace(" ", "");
            }
            DateTime starttime = DateTime.Now;
            // Recreate the progress form so no state survives from a previous run.
            if (Ribbon2.ctff != null)
                Ribbon2.ctff.Dispose();
            Ribbon2.ctff = new CreateTextFileForm();
            Ribbon2.ctff.panel3.Visible = true;
            Ribbon2.ctff.panel1.Visible = false;
            Ribbon2.ctff.panel2.Visible = false;
            Ribbon2.ctff.panel3.Dock = DockStyle.Fill;
            Ribbon2.ctff.ControlBox = false;
            try
            {
                Globals.ThisAddIn.Application.DisplayAlerts = false;
                Ribbon2.wsRrigin = (Microsoft.Office.Interop.Excel.Worksheet)Globals.ThisAddIn.Application.ActiveWorkbook.ActiveSheet;
                // Find the last used column/row by searching backwards from A1.
                var lastColumn = Ribbon2.wsRrigin.Cells.Find("*", Ribbon2.wsRrigin.Cells[1, 1], Microsoft.Office.Interop.Excel.XlFindLookIn.xlFormulas, Microsoft.Office.Interop.Excel.XlLookAt.xlPart, Microsoft.Office.Interop.Excel.XlSearchOrder.xlByColumns, Microsoft.Office.Interop.Excel.XlSearchDirection.xlPrevious, false, Type.Missing, Type.Missing);
                Ribbon2.LastColumnName = Finance_Tools.RemoveNumber(lastColumn.Address).Replace("$", "");
                Finance_Tools.MaxColumnCount = lastColumn.Column;
                var lastrow = Ribbon2.wsRrigin.Cells.Find("*", Ribbon2.wsRrigin.Cells[1, 1], Microsoft.Office.Interop.Excel.XlFindLookIn.xlFormulas, Microsoft.Office.Interop.Excel.XlLookAt.xlPart, Microsoft.Office.Interop.Excel.XlSearchOrder.xlByRows, Microsoft.Office.Interop.Excel.XlSearchDirection.xlPrevious, false, Type.Missing, Type.Missing);
                Ribbon2.LastRowNumber = Finance_Tools.RemoveNotNumber(lastrow.Address);
                Globals.ThisAddIn.Application.ScreenUpdating = false;
                CreateTextFileForm.richTextBox1.Text += "Error List :\r\n";
                if (cbXMLOrText.Text == "XML")
                {
                    // XML mode: the combo text is "component,method"; split at the
                    // last comma and hand off to SaveXML.
                    Ribbon2.ctff.Show();
                    SessionInfo.UserInfo.ComName = cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(","));
                    SessionInfo.UserInfo.MethodName = cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1);
                    SaveXML(null, Ribbon2.wsRrigin);
                }
                else
                {
                    SaveCTF(null, Ribbon2.wsRrigin);
                }
                Globals.ThisAddIn.Application.ScreenUpdating = true;
                Globals.ThisAddIn.Application.DisplayAlerts = true;
                // Text beyond the threshold (21 chars, presumably the "Error List :"
                // prologue plus slack -- TODO confirm) means errors were appended; log them.
                if (CreateTextFileForm.richTextBox1.Text.Length > 21)
                {
                    LogHelper.WriteLog(typeof(Ribbon2), CreateTextFileForm.richTextBox1.Text + " - Create Text File Processing error , Template:" + SessionInfo.UserInfo.FileName);
                }
                // Text mode with collected rows: pull file name (cell 4), path (cell 3)
                // and header flag (cell 5) from the selected grid row and write the file.
                if (cbXMLOrText.Text != "XML" && (finallistCTF.Count != 0))
                {
                    string fileName = "";
                    string filepath = "";
                    bool includeHeaderRow = false;
                    if (dgvCreateTextFile.SelectedRows[0].Cells[4].Value != null)
                        fileName = dgvCreateTextFile.SelectedRows[0].Cells[4].Value.ToString().Replace(" ", "");
                    if (dgvCreateTextFile.SelectedRows[0].Cells[3].Value != null)
                        filepath = dgvCreateTextFile.SelectedRows[0].Cells[3].Value.ToString().Replace(" ", "");
                    if (dgvCreateTextFile.SelectedRows[0].Cells[5].Value != null)
                        includeHeaderRow = dgvCreateTextFile.SelectedRows[0].Cells[5].Value.ToString().Replace(" ", "") == "True" ? true : false;
                    GenTextFile(sender, fileName, filepath, includeHeaderRow);
                    return;
                }
            }
            catch (Exception ex)
            {
                // A busy clipboard is retryable and gets a friendlier message.
                if (ex.ToString().Contains("Clipboard"))
                {
                    MessageBox.Show("Clipboard not ready, please try again.", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                else
                {
                    MessageBox.Show(ex.ToString(), "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
                }
                LogHelper.WriteLog(typeof(Ribbon2), ex.Message + "Create Text File error");
                Ribbon2.ctff.Dispose();
                Globals.ThisAddIn.Application.ScreenUpdating = true;
                Clipboard.SetText("\r\n");
            }
            finally
            {
                // Restore the form's normal layout and report elapsed time.
                Ribbon2.ctff.Focus();
                Ribbon2.ctff.bddata();
                Ribbon2.ctff.panel3.Visible = false;
                Ribbon2.ctff.panel1.Visible = true;
                Ribbon2.ctff.panel2.Visible = true;
                Ribbon2.ctff.ControlBox = true;
                DateTime stoptime = DateTime.Now;
                string costtime = Finance_Tools.DateDiff(starttime, stoptime);
                Globals.ThisAddIn.Application.StatusBar = "latest create text file process costs " + costtime;
                GC.Collect();
            }
        }
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="fileName"></param>
/// <param name="filepath"></param>
/// <param name="includeHeaderRow"></param>
public void GenTextFile(object sender, string fileName, string filepath, bool includeHeaderRow)
{
try
{
bool error = Ribbon2.wsRrigin.Cells.get_Range(fileName).Errors.Item[1].Value;
if (error || string.IsNullOrEmpty(fileName))
{ }
else
fileName = Ribbon2.wsRrigin.Cells.get_Range(fileName).Value.ToString();
}
catch { }
if (File.Exists(filepath + "\\" + fileName + ".txt"))
{
File.Delete(filepath + "\\" + fileName + ".txt");
}
FileStream aFile = new FileStream(filepath + "\\" + fileName + ".txt", FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite);//txtSavePath.Text
StreamWriter sw = new StreamWriter(aFile);
int columnCount = dgvCreateTextFile.Columns.Count - 6;
if (includeHeaderRow)
{
sw.WriteLine("{0}{1}{2}{3}{4}{5}{6}{7}{8}{9}{10}{11}{12}{13}{14}{15}{16}{17}{18}{19}{20}{21}{22}{23}{24}{25}{26}{27}{28}{29}{30}{31}{32}{33}{34}{35}{36}{37}{38}{39}{40}{41}{42}{43}{44}{45}{46}{47}{48}{49}{50}{51}{52}{53}{54}{55}{56}{57}{58}{59}{60}{61}{62}{63}{64}{65}{66}{67}{68}{69}{70}{71}{72}{73}{74}{75}{76}{77}{78}{79}{80}{81}{82}{83}{84}{85}{86}{87}{88}{89}{90}{91}{92}{93}{94}{95}{96}{97}{98}{99}", " ", columnCount > 0 ? dgvCreateTextFile.Columns[6].HeaderText : "", " ", columnCount > 1 ? dgvCreateTextFile.Columns[7].HeaderText : "",
" ", columnCount > 2 ? dgvCreateTextFile.Columns[8].HeaderText : "", " ", columnCount > 3 ? dgvCreateTextFile.Columns[9].HeaderText : "",
" ", columnCount > 4 ? dgvCreateTextFile.Columns[10].HeaderText : "", " ", columnCount > 5 ? dgvCreateTextFile.Columns[11].HeaderText : "",
" ", columnCount > 6 ? dgvCreateTextFile.Columns[12].HeaderText : "", " ", columnCount > 7 ? dgvCreateTextFile.Columns[13].HeaderText : "",
" ", columnCount > 8 ? dgvCreateTextFile.Columns[14].HeaderText : "", " ", columnCount > 9 ? dgvCreateTextFile.Columns[15].HeaderText : "",
" ", columnCount > 10 ? dgvCreateTextFile.Columns[16].HeaderText : "", " ", columnCount > 11 ? dgvCreateTextFile.Columns[17].HeaderText : "",
" ", columnCount > 12 ? dgvCreateTextFile.Columns[18].HeaderText : "", " ", columnCount > 13 ? dgvCreateTextFile.Columns[19].HeaderText : "",
" ", columnCount > 14 ? dgvCreateTextFile.Columns[20].HeaderText : "", " ", columnCount > 15 ? dgvCreateTextFile.Columns[21].HeaderText : "",
" ", columnCount > 16 ? dgvCreateTextFile.Columns[22].HeaderText : "", " ", columnCount > 17 ? dgvCreateTextFile.Columns[23].HeaderText : "",
" ", columnCount > 18 ? dgvCreateTextFile.Columns[24].HeaderText : "", " ", columnCount > 19 ? dgvCreateTextFile.Columns[25].HeaderText : "",
" ", columnCount > 20 ? dgvCreateTextFile.Columns[26].HeaderText : "", " ", columnCount > 21 ? dgvCreateTextFile.Columns[27].HeaderText : "",
" ", columnCount > 22 ? dgvCreateTextFile.Columns[28].HeaderText : "", " ", columnCount > 23 ? dgvCreateTextFile.Columns[29].HeaderText : "",
" ", columnCount > 24 ? dgvCreateTextFile.Columns[30].HeaderText : "", " ", columnCount > 25 ? dgvCreateTextFile.Columns[31].HeaderText : "",
" ", columnCount > 26 ? dgvCreateTextFile.Columns[32].HeaderText : "", " ", columnCount > 27 ? dgvCreateTextFile.Columns[33].HeaderText : "",
" ", columnCount > 28 ? dgvCreateTextFile.Columns[34].HeaderText : "", " ", columnCount > 29 ? dgvCreateTextFile.Columns[35].HeaderText : "",
" ", columnCount > 30 ? dgvCreateTextFile.Columns[36].HeaderText : "", " ", columnCount > 31 ? dgvCreateTextFile.Columns[37].HeaderText : "",
" ", columnCount > 32 ? dgvCreateTextFile.Columns[38].HeaderText : "", " ", columnCount > 33 ? dgvCreateTextFile.Columns[39].HeaderText : "",
" ", columnCount > 34 ? dgvCreateTextFile.Columns[40].HeaderText : "", " ", columnCount > 35 ? dgvCreateTextFile.Columns[41].HeaderText : "",
" ", columnCount > 36 ? dgvCreateTextFile.Columns[42].HeaderText : "", " ", columnCount > 37 ? dgvCreateTextFile.Columns[43].HeaderText : "",
" ", columnCount > 38 ? dgvCreateTextFile.Columns[44].HeaderText : "", " ", columnCount > 39 ? dgvCreateTextFile.Columns[45].HeaderText : "",
" ", columnCount > 40 ? dgvCreateTextFile.Columns[46].HeaderText : "", " ", columnCount > 41 ? dgvCreateTextFile.Columns[47].HeaderText : "",
" ", columnCount > 42 ? dgvCreateTextFile.Columns[48].HeaderText : "", " ", columnCount > 43 ? dgvCreateTextFile.Columns[49].HeaderText : "",
" ", columnCount > 44 ? dgvCreateTextFile.Columns[50].HeaderText : "", " ", columnCount > 45 ? dgvCreateTextFile.Columns[51].HeaderText : "",
" ", columnCount > 46 ? dgvCreateTextFile.Columns[52].HeaderText : "", " ", columnCount > 47 ? dgvCreateTextFile.Columns[53].HeaderText : "",
" ", columnCount > 48 ? dgvCreateTextFile.Columns[54].HeaderText : "", " ", columnCount > 49 ? dgvCreateTextFile.Columns[55].HeaderText : ""
);
}
DataTable dtFile = ft.GetXMLorTextFileFieldsByFileName(cbItems.Text);
for (int i = 0; i < finallistCTF.Count; i++)
{
string str = ft.getFormatStr(finallistCTF[i].Column1, dtFile, 0) + ft.getSeprStr(dtFile, 0)
+ ft.getFormatStr(finallistCTF[i].Column2, dtFile, 1) + ft.getSeprStr(dtFile, 1)
+ ft.getFormatStr(finallistCTF[i].Column3, dtFile, 2) + ft.getSeprStr(dtFile, 2)
+ ft.getFormatStr(finallistCTF[i].Column4, dtFile, 3) + ft.getSeprStr(dtFile, 3)
+ ft.getFormatStr(finallistCTF[i].Column5, dtFile, 4) + ft.getSeprStr(dtFile, 4)
+ ft.getFormatStr(finallistCTF[i].Column6, dtFile, 5) + ft.getSeprStr(dtFile, 5)
+ ft.getFormatStr(finallistCTF[i].Column7, dtFile, 6) + ft.getSeprStr(dtFile, 6)
+ ft.getFormatStr(finallistCTF[i].Column8, dtFile, 7) + ft.getSeprStr(dtFile, 7)
+ ft.getFormatStr(finallistCTF[i].Column9, dtFile, 8) + ft.getSeprStr(dtFile, 8)
+ ft.getFormatStr(finallistCTF[i].Column10, dtFile, 9) + ft.getSeprStr(dtFile, 9)
+ ft.getFormatStr(finallistCTF[i].Column11, dtFile, 10) + ft.getSeprStr(dtFile, 10)
+ ft.getFormatStr(finallistCTF[i].Column12, dtFile, 11) + ft.getSeprStr(dtFile, 11)
+ ft.getFormatStr(finallistCTF[i].Column13, dtFile, 12) + ft.getSeprStr(dtFile, 12)
+ ft.getFormatStr(finallistCTF[i].Column14, dtFile, 13) + ft.getSeprStr(dtFile, 13)
+ ft.getFormatStr(finallistCTF[i].Column15, dtFile, 14) + ft.getSeprStr(dtFile, 14)
+ ft.getFormatStr(finallistCTF[i].Column16, dtFile, 15) + ft.getSeprStr(dtFile, 15)
+ ft.getFormatStr(finallistCTF[i].Column17, dtFile, 16) + ft.getSeprStr(dtFile, 16)
+ ft.getFormatStr(finallistCTF[i].Column18, dtFile, 17) + ft.getSeprStr(dtFile, 17)
+ ft.getFormatStr(finallistCTF[i].Column19, dtFile, 18) + ft.getSeprStr(dtFile, 18)
+ ft.getFormatStr(finallistCTF[i].Column20, dtFile, 19) + ft.getSeprStr(dtFile, 19)
+ ft.getFormatStr(finallistCTF[i].Column21, dtFile, 20) + ft.getSeprStr(dtFile, 20)
+ ft.getFormatStr(finallistCTF[i].Column22, dtFile, 21) + ft.getSeprStr(dtFile, 21)
+ ft.getFormatStr(finallistCTF[i].Column23, dtFile, 22) + ft.getSeprStr(dtFile, 22)
+ ft.getFormatStr(finallistCTF[i].Column24, dtFile, 23) + ft.getSeprStr(dtFile, 23)
+ ft.getFormatStr(finallistCTF[i].Column25, dtFile, 24) + ft.getSeprStr(dtFile, 24)
+ ft.getFormatStr(finallistCTF[i].Column26, dtFile, 25) + ft.getSeprStr(dtFile, 25)
+ ft.getFormatStr(finallistCTF[i].Column27, dtFile, 26) + ft.getSeprStr(dtFile, 26)
+ ft.getFormatStr(finallistCTF[i].Column28, dtFile, 27) + ft.getSeprStr(dtFile, 27)
+ ft.getFormatStr(finallistCTF[i].Column29, dtFile, 28) + ft.getSeprStr(dtFile, 28)
+ ft.getFormatStr(finallistCTF[i].Column30, dtFile, 29) + ft.getSeprStr(dtFile, 29)
+ ft.getFormatStr(finallistCTF[i].Column31, dtFile, 30) + ft.getSeprStr(dtFile, 30)
+ ft.getFormatStr(finallistCTF[i].Column32, dtFile, 31) + ft.getSeprStr(dtFile, 31)
+ ft.getFormatStr(finallistCTF[i].Column33, dtFile, 32) + ft.getSeprStr(dtFile, 32)
+ ft.getFormatStr(finallistCTF[i].Column34, dtFile, 33) + ft.getSeprStr(dtFile, 33)
+ ft.getFormatStr(finallistCTF[i].Column35, dtFile, 34) + ft.getSeprStr(dtFile, 34)
+ ft.getFormatStr(finallistCTF[i].Column36, dtFile, 35) + ft.getSeprStr(dtFile, 35)
+ ft.getFormatStr(finallistCTF[i].Column37, dtFile, 36) + ft.getSeprStr(dtFile, 36)
+ ft.getFormatStr(finallistCTF[i].Column38, dtFile, 37) + ft.getSeprStr(dtFile, 37)
+ ft.getFormatStr(finallistCTF[i].Column39, dtFile, 38) + ft.getSeprStr(dtFile, 38)
+ ft.getFormatStr(finallistCTF[i].Column40, dtFile, 39) + ft.getSeprStr(dtFile, 39)
+ ft.getFormatStr(finallistCTF[i].Column41, dtFile, 40) + ft.getSeprStr(dtFile, 40)
+ ft.getFormatStr(finallistCTF[i].Column42, dtFile, 41) + ft.getSeprStr(dtFile, 41)
+ ft.getFormatStr(finallistCTF[i].Column43, dtFile, 42) + ft.getSeprStr(dtFile, 42)
+ ft.getFormatStr(finallistCTF[i].Column44, dtFile, 43) + ft.getSeprStr(dtFile, 43)
+ ft.getFormatStr(finallistCTF[i].Column45, dtFile, 44) + ft.getSeprStr(dtFile, 44)
+ ft.getFormatStr(finallistCTF[i].Column46, dtFile, 45) + ft.getSeprStr(dtFile, 45)
+ ft.getFormatStr(finallistCTF[i].Column47, dtFile, 46) + ft.getSeprStr(dtFile, 46)
+ ft.getFormatStr(finallistCTF[i].Column48, dtFile, 47) + ft.getSeprStr(dtFile, 47)
+ ft.getFormatStr(finallistCTF[i].Column49, dtFile, 48) + ft.getSeprStr(dtFile, 48)
+ ft.getFormatStr(finallistCTF[i].Column50, dtFile, 49) + ft.getSeprStr(dtFile, 49)
;
sw.WriteLine(str.Replace("\\r", "\r").Replace("\\n", "\n"));
}
sw.Close();
if (sender != null)
MessageBox.Show("Success!", "Message - RSystems FinanceTools", MessageBoxButtons.OK, MessageBoxIcon.Information);
else
SessionInfo.UserInfo.GlobalError += "Process:" + cbItems.Text + "(" + SessionInfo.UserInfo.CurrentRef + ") - Success! ";
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
public void cbXMLOrText_SelectedIndexChanged(object sender, EventArgs e)
{
cbItems.Items.Clear();
this.panel17.Controls.Clear();
if (cbXMLOrText.Text == "XML")
{
DataTable dt = ft.GetXMLorTextFileNames(1);
for (int i = 0; i < dt.Rows.Count; i++)
cbItems.Items.Add(dt.Rows[i]["RelatedName"].ToString());
}
else
{
DataTable dt = ft.GetXMLorTextFileNames(0);
for (int i = 0; i < dt.Rows.Count; i++)
cbItems.Items.Add(dt.Rows[i]["RelatedName"].ToString());
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
public void cbItems_SelectedIndexChanged(object sender, EventArgs e)
{
if (cbXMLOrText.Text == "XML")
{
string comname = cbItems.Text.Substring(0, cbItems.Text.LastIndexOf(","));
string methodName = cbItems.Text.Substring(cbItems.Text.LastIndexOf(",") + 1);
DataTable dt = ft.GetXMLorTextFileFieldsByComName(comname, methodName);
BindCreateTextFileDGV(dt);
}
else
{
string fileName = cbItems.Text;
DataTable dt = ft.GetXMLorTextFileFieldsByFileName(fileName);
BindCreateTextFileDGV(dt);
}
}
/// <summary>
///
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnSetMax_Click(object sender, EventArgs e)
{
setMax s = new setMax();
s.ShowDialog();
}
}
}
| 67.280488 | 640 | 0.551165 | [
"MIT"
] | aaronuhmgmailcom/Financel | PDFView(32bit Office) v2/PDFView(32bit Office) v2/PDFView(32bit Office) v2/Backup/OutputContainer.cs | 220,692 | C# |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml.Linq;
using Clifton.Core.Assertions;
using Clifton.Core.ExtensionMethods;
using Clifton.Core.Semantics;
using Clifton.Core.ModuleManagement;
using Clifton.Core.ServiceManagement;
namespace SemanticPublisherSubscriberDemo
{
static partial class Program
{
public static ServiceManager serviceManager;
public static void InitializeBootstrap()
{
serviceManager = new ServiceManager();
serviceManager.RegisterSingleton<IServiceModuleManager, ServiceModuleManager>();
}
public static void Bootstrap(Action<Exception> onBootstrapException)
{
try
{
IModuleManager moduleMgr = (IModuleManager)serviceManager.Get<IServiceModuleManager>();
List<AssemblyFileName> modules = GetModuleList(XmlFileName.Create("modules.xml"));
moduleMgr.RegisterModules(modules);
serviceManager.FinishedInitialization();
}
catch (Exception ex)
{
onBootstrapException(ex);
}
}
/// <summary>
/// Return the list of assembly names specified in the XML file so that
/// we know what assemblies are considered modules as part of the application.
/// </summary>
private static List<AssemblyFileName> GetModuleList(XmlFileName filename)
{
Assert.That(File.Exists(filename.Value), "Module definition file " + filename.Value + " does not exist.");
XDocument xdoc = XDocument.Load(filename.Value);
return GetModuleList(xdoc);
}
/// <summary>
/// Returns the list of modules specified in the XML document so we know what
/// modules to instantiate.
/// </summary>
private static List<AssemblyFileName> GetModuleList(XDocument xdoc)
{
List<AssemblyFileName> assemblies = new List<AssemblyFileName>();
(from module in xdoc.Element("Modules").Elements("Module")
select module.Attribute("AssemblyName").Value).ForEach(s => assemblies.Add(AssemblyFileName.Create(s)));
return assemblies;
}
}
}
| 34.80303 | 118 | 0.640401 | [
"MIT"
] | cliftonm/clifton | demos/SemanticPublisherSubscriber/Bootstrap.cs | 2,299 | C# |
using log4net.Core;
using System;
using System.Globalization;
using System.IO;
namespace log4net.Layout.Pattern
{
/// <summary>
/// Converter to output the relative time of the event
/// </summary>
/// <remarks>
/// <para>
/// Converter to output the time of the event relative to the start of the program.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
internal sealed class RelativeTimePatternConverter : PatternLayoutConverter
{
/// <summary>
/// Write the relative time to the output
/// </summary>
/// <param name="writer"><see cref="T:System.IO.TextWriter" /> that will receive the formatted result.</param>
/// <param name="loggingEvent">the event being logged</param>
/// <remarks>
/// <para>
/// Writes out the relative time of the event in milliseconds.
/// That is the number of milliseconds between the event <see cref="P:log4net.Core.LoggingEvent.TimeStamp" />
/// and the <see cref="P:log4net.Core.LoggingEvent.StartTime" />.
/// </para>
/// </remarks>
protected override void Convert(TextWriter writer, LoggingEvent loggingEvent)
{
writer.Write(TimeDifferenceInMillis(LoggingEvent.StartTime, loggingEvent.TimeStamp).ToString(NumberFormatInfo.InvariantInfo));
}
/// <summary>
/// Helper method to get the time difference between two DateTime objects
/// </summary>
/// <param name="start">start time (in the current local time zone)</param>
/// <param name="end">end time (in the current local time zone)</param>
/// <returns>the time difference in milliseconds</returns>
private static long TimeDifferenceInMillis(DateTime start, DateTime end)
{
return (long)(end.ToUniversalTime() - start.ToUniversalTime()).TotalMilliseconds;
}
}
}
| 36.0625 | 129 | 0.707106 | [
"MIT"
] | HuyTruong19x/DDTank4.1 | Source Server/SourceQuest4.5/log4net/log4net.Layout.Pattern/RelativeTimePatternConverter.cs | 1,731 | C# |
namespace ComponentTests
{
using System.Collections.Generic;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using WebApi;
/// <summary>
/// </summary>
public sealed class CustomWebApplicationFactory : WebApplicationFactory<Startup>
{
protected override void ConfigureWebHost(IWebHostBuilder builder) => builder.ConfigureAppConfiguration(
(context, config) =>
{
config.AddInMemoryCollection(
new Dictionary<string, string> {["PersistenceModule:UseFake"] = "true"});
}).ConfigureServices(services =>
{
services.AddAuthentication(x =>
{
x.DefaultAuthenticateScheme = "Test";
x.DefaultChallengeScheme = "Test";
})
.AddScheme<AuthenticationSchemeOptions, TestAuthenticationHandler>(
"Test", options => { });
});
}
}
| 35.5625 | 111 | 0.611599 | [
"Apache-2.0"
] | gfragoso/clean-architecture-manga | test/ComponentTests/CustomWebApplicationFactory.cs | 1,138 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.ML.Core.Data;
using Microsoft.ML.Runtime.CommandLine;
using Microsoft.ML.Runtime.Data;
using Microsoft.ML.Runtime.Data.Conversion;
using Microsoft.ML.Runtime.FastTree.Internal;
using Microsoft.ML.Runtime.Internal.Calibration;
using Microsoft.ML.Runtime.Internal.Internallearn;
using Microsoft.ML.Runtime.Internal.Utilities;
using Microsoft.ML.Runtime.Model;
using Microsoft.ML.Runtime.Model.Onnx;
using Microsoft.ML.Runtime.Model.Pfa;
using Microsoft.ML.Runtime.Training;
using Microsoft.ML.Runtime.TreePredictor;
using Newtonsoft.Json.Linq;
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using Float = System.Single;
// All of these reviews apply in general to fast tree and random forest implementations.
//REVIEW: Decouple train method in Application.cs to have boosting and random forest logic seperate.
//REVIEW: Do we need to keep all the fast tree based testers?
namespace Microsoft.ML.Runtime.FastTree
{
public delegate void SignatureTreeEnsembleTrainer();
    /// <summary>
    /// Holds state shared by all instantiations of the generic
    /// <c>FastTreeTrainerBase</c>.  A static field on a generic class exists once per
    /// closed type, so a single shared lock object must live in this non-generic class.
    /// </summary>
    internal static class FastTreeShared
    {
        // Global lock taken for the whole duration of training (see TrainCore).
        public static readonly object TrainLock = new object();
    }
public abstract class FastTreeTrainerBase<TArgs, TTransformer, TModel> :
TrainerEstimatorBase<TTransformer, TModel>
where TTransformer: ISingleFeaturePredictionTransformer<TModel>
where TArgs : TreeArgs, new()
where TModel : IPredictorProducing<Float>
{
protected readonly TArgs Args;
protected readonly bool AllowGC;
protected Ensemble TrainedEnsemble;
protected int FeatureCount;
protected RoleMappedData ValidData;
protected IParallelTraining ParallelTraining;
protected OptimizationAlgorithm OptimizationAlgorithm;
protected Dataset TrainSet;
protected Dataset ValidSet;
protected Dataset[] TestSets;
protected int[] FeatureMap;
protected List<Test> Tests;
protected TestHistory PruningTest;
protected int[] CategoricalFeatures;
// Test for early stopping.
protected Test TrainTest;
protected Test ValidTest;
protected double[] InitTrainScores;
protected double[] InitValidScores;
protected double[][] InitTestScores;
//protected int Iteration;
protected Ensemble Ensemble;
protected bool HasValidSet => ValidSet != null;
private const string RegisterName = "FastTreeTraining";
// random for active features selection
private Random _featureSelectionRandom;
protected string InnerArgs => CmdParser.GetSettings(Host, Args, new TArgs());
public override TrainerInfo Info { get; }
public bool HasCategoricalFeatures => Utils.Size(CategoricalFeatures) > 0;
private protected virtual bool NeedCalibration => false;
        /// <summary>
        /// Constructor to use when instantiating the classes deriving from here through the API.
        /// Builds a fresh <typeparamref name="TArgs"/>, lets <paramref name="advancedSettings"/>
        /// customize it, then copies the explicitly supplied column names into it.
        /// </summary>
        /// <param name="env">Host environment.</param>
        /// <param name="label">Label column the trainer expects.</param>
        /// <param name="featureColumn">Name of the feature column.</param>
        /// <param name="weightColumn">Optional example-weight column; only written into the args when non-null.</param>
        /// <param name="groupIdColumn">Optional group-id column; only written into the args when non-null.</param>
        /// <param name="advancedSettings">Optional delegate that mutates the argument object before the explicit values are applied.</param>
        private protected FastTreeTrainerBase(IHostEnvironment env, SchemaShape.Column label, string featureColumn,
            string weightColumn = null, string groupIdColumn = null, Action<TArgs> advancedSettings = null)
            : base(Contracts.CheckRef(env, nameof(env)).Register(RegisterName), TrainerUtils.MakeR4VecFeature(featureColumn), label, TrainerUtils.MakeR4ScalarWeightColumn(weightColumn))
        {
            Args = new TArgs();
            // Apply the advanced args, if the user supplied any.
            advancedSettings?.Invoke(Args);
            // Check that the user didn't specify different label, group, feature, or weight
            // columns in the args from what they supplied directly to this constructor.
            TrainerUtils.CheckArgsHaveDefaultColNames(Host, Args);
            Args.LabelColumn = label.Name;
            Args.FeatureColumn = featureColumn;
            if (weightColumn != null)
                Args.WeightColumn = weightColumn;
            if (groupIdColumn != null)
                Args.GroupIdColumn = groupIdColumn;
            // The discretization step renders this trainer non-parametric, and therefore it does not need normalization.
            // Also since it builds its own internal discretized columnar structures, it cannot benefit from caching.
            // Finally, even the binary classifiers, being logitboost, tend to not benefit from external calibration.
            Info = new TrainerInfo(normalization: false, caching: false, calibration: NeedCalibration, supportValid: true);
            // REVIEW: CLR 4.6 has a bug that is only exposed in Scope, and if we trigger GC.Collect in scope environment
            // with memory consumption more than 5GB, GC get stuck in infinite loop. So for now let's call GC only if we call things from LocalEnvironment.
            AllowGC = (env is HostEnvironmentBase<LocalEnvironment>);
            Initialize(env);
        }
        /// <summary>
        /// Legacy constructor that is used when invoking the classes deriving from this, through maml.
        /// The fully-populated argument object is supplied directly instead of being built here.
        /// </summary>
        /// <param name="env">Host environment.</param>
        /// <param name="args">Pre-built trainer arguments (column names included).</param>
        /// <param name="label">Label column the trainer expects.</param>
        private protected FastTreeTrainerBase(IHostEnvironment env, TArgs args, SchemaShape.Column label)
            : base(Contracts.CheckRef(env, nameof(env)).Register(RegisterName), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), label, TrainerUtils.MakeR4ScalarWeightColumn(args.WeightColumn))
        {
            Host.CheckValue(args, nameof(args));
            Args = args;
            // The discretization step renders this trainer non-parametric, and therefore it does not need normalization.
            // Also since it builds its own internal discretized columnar structures, it cannot benefit from caching.
            // Finally, even the binary classifiers, being logitboost, tend to not benefit from external calibration.
            Info = new TrainerInfo(normalization: false, caching: false, calibration: NeedCalibration, supportValid: true);
            // REVIEW: CLR 4.6 has a bug that is only exposed in Scope, and if we trigger GC.Collect in scope environment
            // with memory consumption more than 5GB, GC get stuck in infinite loop. So for now let's call GC only if we call things from LocalEnvironment.
            AllowGC = (env is HostEnvironmentBase<LocalEnvironment>);
            Initialize(env);
        }
protected abstract void PrepareLabels(IChannel ch);
protected abstract void InitializeTests();
protected abstract Test ConstructTestForTrainingData();
protected abstract OptimizationAlgorithm ConstructOptimizationAlgorithm(IChannel ch);
protected abstract TreeLearner ConstructTreeLearner(IChannel ch);
protected abstract ObjectiveFunctionBase ConstructObjFunc(IChannel ch);
protected virtual Float GetMaxLabel()
{
return Float.PositiveInfinity;
}
/// <summary>
/// If, after applying the advancedSettings delegate, the args are different that the default value
/// and are also different than the value supplied directly to the xtension method, warn the user
/// about which value is being used.
/// The parameters that appear here, numTrees, minDocumentsInLeafs, numLeaves, learningRate are the ones the users are most likely to tune.
/// This list should follow the one in the constructor, and the extension methods on the <see cref="TrainContextBase"/>.
/// REVIEW: we should somehow mark the arguments that are set apart in those two places. Currently they stand out by their sort order annotation.
/// </summary>
protected void CheckArgsAndAdvancedSettingMismatch(int numLeaves,
int numTrees,
int minDocumentsInLeafs,
double learningRate,
BoostedTreeArgs snapshot,
BoostedTreeArgs currentArgs)
{
using (var ch = Host.Start("Comparing advanced settings with the directly provided values."))
{
// Check that the user didn't supply different parameters in the args, from what it specified directly.
TrainerUtils.CheckArgsAndAdvancedSettingMismatch(ch, numLeaves, snapshot.NumLeaves, currentArgs.NumLeaves, nameof(numLeaves));
TrainerUtils.CheckArgsAndAdvancedSettingMismatch(ch, numTrees, snapshot.NumTrees, currentArgs.NumTrees, nameof(numTrees));
TrainerUtils.CheckArgsAndAdvancedSettingMismatch(ch, minDocumentsInLeafs, snapshot.MinDocumentsInLeafs, currentArgs.MinDocumentsInLeafs, nameof(minDocumentsInLeafs));
TrainerUtils.CheckArgsAndAdvancedSettingMismatch(ch, learningRate, snapshot.LearningRates, currentArgs.LearningRates, nameof(learningRate));
}
}
        /// <summary>
        /// One-time construction-side setup: resolves the worker thread count (capped by
        /// the host's concurrency factor, with a warning when the cap applies), creates
        /// the parallel-training backend (or a single trainer when none is configured),
        /// and initializes the test list and thread pool.
        /// </summary>
        private void Initialize(IHostEnvironment env)
        {
            int numThreads = Args.NumThreads ?? Environment.ProcessorCount;
            if (Host.ConcurrencyFactor > 0 && numThreads > Host.ConcurrencyFactor)
            {
                using (var ch = Host.Start("FastTreeTrainerBase"))
                {
                    numThreads = Host.ConcurrencyFactor;
                    ch.Warning("The number of threads specified in trainer arguments is larger than the concurrency factor "
                        + "setting of the environment. Using {0} training threads instead.", numThreads);
                }
            }
            ParallelTraining = Args.ParallelTrainer != null ? Args.ParallelTrainer.CreateComponent(env) : new SingleTrainer();
            ParallelTraining.InitEnvironment();
            Tests = new List<Test>();
            InitializeThreads(numThreads);
        }
        /// <summary>
        /// Converts the role-mapped training (and optional validation) data into the
        /// internal binned dataset representation, recording categorical feature
        /// indices and the feature map produced by binning.
        /// </summary>
        protected void ConvertData(RoleMappedData trainData)
        {
            trainData.Schema.Schema.TryGetColumnIndex(DefaultColumnNames.Features, out int featureIndex);
            MetadataUtils.TryGetCategoricalFeatureIndices(trainData.Schema.Schema, featureIndex, out CategoricalFeatures);
            // Use disk transposition only when both train and validation data permit it.
            var useTranspose = UseTranspose(Args.DiskTranspose, trainData) && (ValidData == null || UseTranspose(Args.DiskTranspose, ValidData));
            var instanceConverter = new ExamplesToFastTreeBins(Host, Args.MaxBins, useTranspose, !Args.FeatureFlocks, Args.MinDocumentsInLeafs, GetMaxLabel());
            TrainSet = instanceConverter.FindBinsAndReturnDataset(trainData, PredictionKind, ParallelTraining, CategoricalFeatures, Args.CategoricalSplit);
            FeatureMap = instanceConverter.FeatureMap;
            if (ValidData != null)
                ValidSet = instanceConverter.GetCompatibleDataset(ValidData, PredictionKind, CategoricalFeatures, Args.CategoricalSplit);
        }
private bool UseTranspose(bool? useTranspose, RoleMappedData data)
{
Host.AssertValue(data);
Host.AssertValue(data.Schema.Feature);
if (useTranspose.HasValue)
return useTranspose.Value;
ITransposeDataView td = data.Data as ITransposeDataView;
return td != null && td.TransposeSchema.GetSlotType(data.Schema.Feature.Index) != null;
}
        /// <summary>
        /// Runs the complete training sequence under a global lock: argument checks and
        /// initialization, the boosting loop, optional timing output, capture of the
        /// trained ensemble (with feature remapping when binning filtered features),
        /// and finalization of the parallel-training environment.
        /// </summary>
        protected void TrainCore(IChannel ch)
        {
            Contracts.CheckValue(ch, nameof(ch));
            // REVIEW: Get rid of this lock when we completely remove all static classes from FastTree such as BlockingThreadPool.
            lock (FastTreeShared.TrainLock)
            {
                using (Timer.Time(TimerEvent.TotalInitialization))
                {
                    CheckArgs(ch);
                    PrintPrologInfo(ch);
                    Initialize(ch);
                    PrintMemoryStats(ch);
                }
                using (Timer.Time(TimerEvent.TotalTrain))
                    Train(ch);
                if (Args.ExecutionTimes)
                    PrintExecutionTimes(ch);
                TrainedEnsemble = Ensemble;
                if (FeatureMap != null)
                    TrainedEnsemble.RemapFeatures(FeatureMap);
                ParallelTraining.FinalizeEnvironment();
            }
        }

        /// <summary>
        /// Early-stopping hook invoked after each iteration. The base implementation
        /// never stops and reports the current tree count as the best iteration.
        /// </summary>
        protected virtual bool ShouldStop(IChannel ch, ref IEarlyStoppingCriterion earlyStopping, ref int bestIteration)
        {
            bestIteration = Ensemble.NumTrees;
            return false;
        }

        /// <summary>The iteration to trim the ensemble to; by default the full tree count.</summary>
        protected virtual int GetBestIteration(IChannel ch) => Ensemble.NumTrees;
        /// <summary>Initializes the shared task manager with the requested thread count.</summary>
        protected virtual void InitializeThreads(int numThreads)
        {
            ThreadTaskManager.Initialize(numThreads);
        }

        /// <summary>Prints a breakdown of the time spent in each training phase.</summary>
        protected virtual void PrintExecutionTimes(IChannel ch)
        {
            ch.Info("Execution time breakdown:\n{0}", Timer.GetString());
        }
        /// <summary>
        /// Validates and normalizes the training arguments: sets the global IntArray
        /// compression level, clamps the histogram pool size into a legal range,
        /// requires the tree count to be a multiple of the bagging size, and checks
        /// that the gain confidence level lies in [0,1).
        /// </summary>
        protected virtual void CheckArgs(IChannel ch)
        {
            Args.Check(ch);
            IntArray.CompatibilityLevel = Args.FeatureCompressionLevel;
            // change arguments
            if (Args.HistogramPoolSize < 2)
                Args.HistogramPoolSize = Args.NumLeaves * 2 / 3;
            if (Args.HistogramPoolSize > Args.NumLeaves - 1)
                Args.HistogramPoolSize = Args.NumLeaves - 1;
            if (Args.BaggingSize > 0)
            {
                int bagCount = Args.NumTrees / Args.BaggingSize;
                if (bagCount * Args.BaggingSize != Args.NumTrees)
                    throw ch.Except("Number of trees should be a multiple of number bag size");
            }
            if (!(0 <= Args.GainConfidenceLevel && Args.GainConfidenceLevel < 1))
                throw ch.Except("Gain confidence level must be in the range [0,1)");
#if OLD_DATALOAD
#if !NO_STORE
            // Legacy data-load path (compiled out unless OLD_DATALOAD is defined):
            // ensures the bins offload directory exists before training starts.
            if (_args.offloadBinsToFileStore)
            {
                if (!string.IsNullOrEmpty(_args.offloadBinsDirectory) && !Directory.Exists(_args.offloadBinsDirectory))
                {
                    try
                    {
                        Directory.CreateDirectory(_args.offloadBinsDirectory);
                    }
                    catch (Exception e)
                    {
                        throw ch.Except(e, "Failure creating bins offload directory {0} - Exception {1}", _args.offloadBinsDirectory, e.Message);
                    }
                }
            }
#endif
#endif
        }
        /// <summary>
        /// A virtual method that is used to print the header of the test graph.
        /// Applications that need to print a test graph are supposed to override
        /// it to print a specific test graph header.
        /// </summary>
        /// <returns> string representation of test graph header </returns>
        protected virtual string GetTestGraphHeader() => string.Empty;

        /// <summary>
        /// A virtual method that is used to print a single line of the test graph.
        /// Applications that need to print a test graph are supposed to override
        /// it to print a specific line of the test graph after a new iteration is finished.
        /// </summary>
        /// <returns> string representation of a line of test graph </returns>
        protected virtual string GetTestGraphLine() => string.Empty;

        /// <summary>
        /// A virtual method that is used to compute test results after each iteration is finished.
        /// The base implementation does nothing.
        /// </summary>
        protected virtual void ComputeTests()
        {
        }
protected void PrintTestGraph(IChannel ch)
{
// we call Tests computing no matter whether we require to print test graph
ComputeTests();
if (!Args.PrintTestGraph)
return;
if (Ensemble.NumTrees == 0)
ch.Info(GetTestGraphHeader());
else
ch.Info(GetTestGraphLine());
return;
}
        /// <summary>
        /// Per-run initialization: prepares labels, creates the empty ensemble and the
        /// optimization algorithm, sets up the tests, and (when permitted) forces two
        /// gen-2 garbage collections so training starts with a compacted heap.
        /// </summary>
        protected virtual void Initialize(IChannel ch)
        {
            #region Load/Initialize State
            using (Timer.Time(TimerEvent.InitializeLabels))
                PrepareLabels(ch);
            using (Timer.Time(TimerEvent.InitializeTraining))
            {
                InitializeEnsemble();
                OptimizationAlgorithm = ConstructOptimizationAlgorithm(ch);
            }
            using (Timer.Time(TimerEvent.InitializeTests))
                InitializeTests();
            if (AllowGC)
            {
                GC.Collect(2, GCCollectionMode.Forced);
                GC.Collect(2, GCCollectionMode.Forced);
            }
            #endregion
        }
#if !NO_STORE
        /// <summary>
        /// Calculates the percentage of feature bins that will fit into memory based on current available memory in the machine.
        /// </summary>
        /// <returns>A float number between 0 and 1 indicating the percentage of features to load.
        /// The number will not be smaller than two times the feature fraction value</returns>
        private float GetFeaturePercentInMemory(IChannel ch)
        {
            const float maxFeaturePercentValue = 1.0f;
            float availableMemory = GetMachineAvailableBytes();
            ch.Info("Available memory in the machine is = {0} bytes", availableMemory.ToString("N", CultureInfo.InvariantCulture));
            // When bins are preloaded the floor is twice the feature fraction (see <returns>).
            // NOTE(review): this store-enabled path uses the legacy `_args` fields.
            float minFeaturePercentThreshold = _args.preloadFeatureBinsBeforeTraining ? (float)_args.featureFraction * 2 : (float)_args.featureFraction;
            if (minFeaturePercentThreshold >= maxFeaturePercentValue)
            {
                return maxFeaturePercentValue;
            }
            // Initial free memory allowance in bytes for single and parallel fastrank modes
            float freeMemoryAllowance = 1024 * 1024 * 512;
            if (_optimizationAlgorithm.TreeLearner != null)
            {
                // Get the size of memory in bytes needed by the tree learner internal data structures
                freeMemoryAllowance += _optimizationAlgorithm.TreeLearner.GetSizeOfReservedMemory();
            }
            // Clamp at zero so the subsequent ratio never goes negative.
            availableMemory = (availableMemory > freeMemoryAllowance) ? availableMemory - freeMemoryAllowance : 0;
            long featureSize = TrainSet.FeatureSetSize;
            if (ValidSet != null)
            {
                featureSize += ValidSet.FeatureSetSize;
            }
            if (TestSets != null)
            {
                foreach (var item in TestSets)
                {
                    featureSize += item.FeatureSetSize;
                }
            }
            ch.Info("Total Feature bins size is = {0} bytes", featureSize.ToString("N", CultureInfo.InvariantCulture));
            return Math.Min(Math.Max(minFeaturePercentThreshold, availableMemory / featureSize), maxFeaturePercentValue);
        }
#endif
protected bool[] GetActiveFeatures()
{
var activeFeatures = Utils.CreateArray(TrainSet.NumFeatures, true);
if (Args.FeatureFraction < 1.0)
{
if (_featureSelectionRandom == null)
_featureSelectionRandom = new Random(Args.FeatureSelectSeed);
for (int i = 0; i < TrainSet.NumFeatures; ++i)
{
if (activeFeatures[i])
activeFeatures[i] = _featureSelectionRandom.NextDouble() <= Args.FeatureFraction;
}
}
return activeFeatures;
}
private string GetDatasetStatistics(Dataset set)
{
long datasetSize = set.SizeInBytes();
int skeletonSize = set.Skeleton.SizeInBytes();
return string.Format("set contains {0} query-doc pairs in {1} queries with {2} features and uses {3} MB ({4} MB for features)",
set.NumDocs, set.NumQueries, set.NumFeatures, datasetSize / 1024 / 1024, (datasetSize - skeletonSize) / 1024 / 1024);
}
        /// <summary>
        /// Traces dataset sizes (train/validation/test) and process memory counters
        /// (working set, virtual, private, and their peaks) for diagnostics.
        /// </summary>
        protected virtual void PrintMemoryStats(IChannel ch)
        {
            Contracts.AssertValue(ch);
            ch.Trace("Training {0}", GetDatasetStatistics(TrainSet));
            if (ValidSet != null)
                ch.Trace("Validation {0}", GetDatasetStatistics(ValidSet));
            if (TestSets != null)
            {
                for (int i = 0; i < TestSets.Length; ++i)
                    ch.Trace("ComputeTests[{1}] {0}",
                        GetDatasetStatistics(TestSets[i]), i);
            }
            if (AllowGC)
                ch.Trace("GC Total Memory = {0} MB", GC.GetTotalMemory(true) / 1024 / 1024);
            Process currentProcess = Process.GetCurrentProcess();
            ch.Trace("Working Set = {0} MB", currentProcess.WorkingSet64 / 1024 / 1024);
            ch.Trace("Virtual Memory = {0} MB",
                currentProcess.VirtualMemorySize64 / 1024 / 1024);
            ch.Trace("Private Memory = {0} MB",
                currentProcess.PrivateMemorySize64 / 1024 / 1024);
            ch.Trace("Peak Working Set = {0} MB", currentProcess.PeakWorkingSet64 / 1024 / 1024);
            ch.Trace("Peak Virtual Memory = {0} MB",
                currentProcess.PeakVirtualMemorySize64 / 1024 / 1024);
        }
protected bool AreSamplesWeighted(IChannel ch)
{
return TrainSet.SampleWeights != null;
}
        /// <summary>Creates the fresh, empty tree ensemble to be grown during training.</summary>
        private void InitializeEnsemble()
        {
            Ensemble = new Ensemble();
        }
/// <summary>
/// Creates weights wrapping (possibly, trivial) for gradient target values.
/// </summary>
protected virtual IGradientAdjuster MakeGradientWrapper(IChannel ch)
{
if (AreSamplesWeighted(ch))
return new QueryWeightsGradientWrapper();
else
return new TrivialGradientWrapper();
}
#if !NO_STORE
        /// <summary>
        /// Unloads feature bins being used in the current iteration.
        /// </summary>
        /// <param name="featureToUnload">Boolean array indicating the features to unload</param>
        private void UnloadFeatureBins(bool[] featureToUnload)
        {
            foreach (ScoreTracker scoreTracker in this._optimizationAlgorithm.TrackedScores)
            {
                for (int i = 0; i < scoreTracker.Dataset.Features.Length; i++)
                {
                    if (featureToUnload[i])
                    {
                        // Only return buffers to the pool that were allocated using the pool
                        // So far only this type of IntArrays below have buffer pool support.
                        // This is to avoid unexpected leaks in case a new IntArray is added but we are not allocating it from the pool.
                        if (scoreTracker.Dataset.Features[i].Bins is DenseIntArray ||
                            scoreTracker.Dataset.Features[i].Bins is DeltaSparseIntArray ||
                            scoreTracker.Dataset.Features[i].Bins is DeltaRepeatIntArray)
                        {
                            scoreTracker.Dataset.Features[i].Bins.ReturnBuffer();
                            scoreTracker.Dataset.Features[i].Bins = null;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Worker thread delegate that loads features for the next training iteration
        /// </summary>
        /// <param name="state">thread state object; a bool[] mask of features to load</param>
        private void LazyFeatureLoad(object state)
        {
            bool[] featuresToLoad = (bool[])state;
            foreach (ScoreTracker scoreTracker in this._optimizationAlgorithm.TrackedScores)
            {
                for (int i = 0; i < scoreTracker.Dataset.Features.Length; i++)
                {
                    if (featuresToLoad[i])
                    {
                        // just using the Bins property so feature bins are loaded into memory
                        IntArray bins = scoreTracker.Dataset.Features[i].Bins;
                    }
                }
            }
        }

        /// <summary>
        /// Iterates through the feature sets needed in future tree training iterations (i.e. in ActiveFeatureSetQueue),
        /// using the same order as they were enqueued, and it returns the initial active features based on the percentage parameter.
        /// </summary>
        /// <param name="pctFeatureThreshold">A float value between 0 and 1 indicating maximum percentage of features to return</param>
        /// <returns>Array indicating calculated feature list</returns>
        private bool[] GetNextFeaturesByThreshold(float pctFeatureThreshold)
        {
            int totalUniqueFeatureCount = 0;
            bool[] nextActiveFeatures = new bool[TrainSet.NumFeatures];

            if (pctFeatureThreshold == 1.0f)
            {
                // return all features to load
                return nextActiveFeatures.Select(x => x = true).ToArray();
            }

            int maxNumberOfFeatures = (int)(pctFeatureThreshold * TrainSet.NumFeatures);
            // Union the queued masks in enqueue order until the feature budget is reached.
            for (int i = 0; i < _activeFeatureSetQueue.Count; i++)
            {
                bool[] tempActiveFeatures = _activeFeatureSetQueue.ElementAt(i);
                for (int j = 0; j < tempActiveFeatures.Length; j++)
                {
                    if (tempActiveFeatures[j] && !nextActiveFeatures[j])
                    {
                        nextActiveFeatures[j] = true;
                        if (totalUniqueFeatureCount++ > maxNumberOfFeatures)
                            return nextActiveFeatures;
                    }
                }
            }
            return nextActiveFeatures;
        }

        /// <summary>
        /// Adds several items in the ActiveFeature queue
        /// </summary>
        /// <param name="numberOfItems">Number of items to add</param>
        private void GenerateActiveFeatureLists(int numberOfItems)
        {
            for (int i = 0; i < numberOfItems; i++)
            {
                _activeFeatureSetQueue.Enqueue(GetActiveFeatures());
            }
        }
#endif
        /// <summary>Creates the bagging provider; callers must have a positive bagging size.</summary>
        protected virtual BaggingProvider CreateBaggingProvider()
        {
            Contracts.Assert(Args.BaggingSize > 0);
            return new BaggingProvider(TrainSet, Args.NumLeaves, Args.RngSeed, Args.BaggingTrainFraction);
        }

        /// <summary>Whether the optimizer should start from randomized scores; false by default.</summary>
        protected virtual bool ShouldRandomStartOptimizer()
        {
            return false;
        }
        /// <summary>
        /// The main boosting loop. Optionally randomizes the starting scores, then
        /// grows trees until the requested count is reached or an early-stopping rule
        /// fires, handling bagging, progress reporting, per-iteration tests, and (in
        /// store-enabled builds) lazy feature-bin load/unload. Finally trims the
        /// ensemble to the best iteration and populates raw thresholds.
        /// </summary>
        protected virtual void Train(IChannel ch)
        {
            Contracts.AssertValue(ch);
            int numTotalTrees = Args.NumTrees;
            ch.Info(
                "Reserved memory for tree learner: {0} bytes",
                OptimizationAlgorithm.TreeLearner.GetSizeOfReservedMemory());
#if !NO_STORE
            if (_args.offloadBinsToFileStore)
            {
                // Initialize feature percent to load before loading any features
                _featurePercentToLoad = GetFeaturePercentInMemory(ch);
                ch.Info("Using featurePercentToLoad = {0} ", _featurePercentToLoad);
            }
#endif
            // random starting point
            bool revertRandomStart = false;
            if (Ensemble.NumTrees < numTotalTrees && ShouldRandomStartOptimizer())
            {
                ch.Info("Randomizing start point");
                OptimizationAlgorithm.TrainingScores.RandomizeScores(Args.RngSeed, false);
                revertRandomStart = true;
            }
            ch.Info("Starting to train ...");
            BaggingProvider baggingProvider = Args.BaggingSize > 0 ? CreateBaggingProvider() : null;
#if OLD_DATALOAD
#if !NO_STORE
            // Preload
            GenerateActiveFeatureLists(_args.numTrees);
            Thread featureLoadThread = null;
            // Initial feature load
            if (_args.offloadBinsToFileStore)
            {
                FileObjectStore<IntArrayFormatter>.GetDefaultInstance().SealObjectStore();
                if (_args.preloadFeatureBinsBeforeTraining)
                {
                    StartFeatureLoadThread(GetNextFeaturesByThreshold(_featurePercentToLoad)).Join();
                }
            }
#endif
#endif
            IEarlyStoppingCriterion earlyStoppingRule = null;
            int bestIteration = 0;
            int emptyTrees = 0;
            using (var pch = Host.StartProgressChannel("FastTree training"))
            {
                pch.SetHeader(new ProgressHeader("trees"), e => e.SetProgress(0, Ensemble.NumTrees, numTotalTrees));
                while (Ensemble.NumTrees < numTotalTrees)
                {
                    using (Timer.Time(TimerEvent.Iteration))
                    {
#if NO_STORE
                        bool[] activeFeatures = GetActiveFeatures();
#else
                        bool[] activeFeatures = _activeFeatureSetQueue.Dequeue();
#endif
                        // Start a fresh bag (and training partition) at each bag boundary.
                        if (Args.BaggingSize > 0 && Ensemble.NumTrees % Args.BaggingSize == 0)
                        {
                            baggingProvider.GenerateNewBag();
                            OptimizationAlgorithm.TreeLearner.Partitioning =
                                baggingProvider.GetCurrentTrainingPartition();
                        }
#if !NO_STORE
                        if (_args.offloadBinsToFileStore)
                        {
                            featureLoadThread = StartFeatureLoadThread(GetNextFeaturesByThreshold(_featurePercentToLoad));
                            if (!_args.preloadFeatureBinsBeforeTraining)
                                featureLoadThread.Join();
                        }
#endif
                        // call the weak learner
                        var tree = OptimizationAlgorithm.TrainingIteration(ch, activeFeatures);
                        if (tree == null)
                        {
                            // No tree was grown this iteration; shrink the target count
                            // so the loop still terminates, and count it for the warning.
                            emptyTrees++;
                            numTotalTrees--;
                        }
                        else if (Args.BaggingSize > 0 && Ensemble.Trees.Count() > 0)
                        {
                            ch.Assert(Ensemble.Trees.Last() == tree);
                            Ensemble.Trees.Last()
                                .AddOutputsToScores(OptimizationAlgorithm.TrainingScores.Dataset,
                                    OptimizationAlgorithm.TrainingScores.Scores,
                                    baggingProvider.GetCurrentOutOfBagPartition().Documents);
                        }
                        CustomizedTrainingIteration(tree);
                        using (Timer.Time(TimerEvent.Test))
                        {
                            PrintIterationMessage(ch, pch);
                            PrintTestResults(ch);
                        }
                        // revert randomized start
                        if (revertRandomStart)
                        {
                            revertRandomStart = false;
                            ch.Info("Reverting random score assignment");
                            OptimizationAlgorithm.TrainingScores.RandomizeScores(Args.RngSeed, true);
                        }
#if !NO_STORE
                        if (_args.offloadBinsToFileStore)
                        {
                            // Unload only features that are not needed for the next iteration
                            bool[] featuresToUnload = activeFeatures;
                            if (_args.preloadFeatureBinsBeforeTraining)
                            {
                                featuresToUnload =
                                    activeFeatures.Zip(GetNextFeaturesByThreshold(_featurePercentToLoad),
                                        (current, next) => current && !next).ToArray();
                            }
                            UnloadFeatureBins(featuresToUnload);
                            if (featureLoadThread != null &&
                                _args.preloadFeatureBinsBeforeTraining)
                            {
                                // wait for loading the features needed for the next iteration
                                featureLoadThread.Join();
                            }
                        }
#endif
                        if (ShouldStop(ch, ref earlyStoppingRule, ref bestIteration))
                            break;
                    }
                }
                if (emptyTrees > 0)
                {
                    ch.Warning("{0} of the boosting iterations failed to grow a tree. This is commonly because the " +
                               "minimum documents in leaf hyperparameter was set too high for this dataset.", emptyTrees);
                }
            }
            if (earlyStoppingRule != null)
            {
                Contracts.Assert(numTotalTrees == 0 || bestIteration > 0);
                // REVIEW: Need to reconcile with future progress reporting changes.
                ch.Info("The training is stopped at {0} and iteration {1} is picked",
                    Ensemble.NumTrees, bestIteration);
            }
            else
            {
                bestIteration = GetBestIteration(ch);
            }
            OptimizationAlgorithm.FinalizeLearning(bestIteration);
            Ensemble.PopulateRawThresholds(TrainSet);
            ParallelTraining.FinalizeTreeLearner();
        }
#if !NO_STORE
        /// <summary>
        /// Gets the available bytes performance counter on the local machine
        /// </summary>
        /// <returns>Available bytes number</returns>
        private float GetMachineAvailableBytes()
        {
            using (var availableBytes = new System.Diagnostics.PerformanceCounter("Memory", "Available Bytes", true))
            {
                return availableBytes.NextValue();
            }
        }
#endif

        // This method is called at the end of each training iteration, with the tree that was learnt on that iteration.
        // Note that this tree can be null if no tree was learnt this iteration.
        protected virtual void CustomizedTrainingIteration(RegressionTree tree)
        {
        }

        /// <summary>Checkpoints the progress channel once every 50 trees.</summary>
        protected virtual void PrintIterationMessage(IChannel ch, IProgressChannel pch)
        {
            // REVIEW: report some metrics, not just number of trees?
            int iteration = Ensemble.NumTrees;
            if (iteration % 50 == 49)
                pch.Checkpoint(iteration + 1);
        }
protected virtual void PrintTestResults(IChannel ch)
{
if (Args.TestFrequency != int.MaxValue && (Ensemble.NumTrees % Args.TestFrequency == 0 || Ensemble.NumTrees == Args.NumTrees))
{
var sb = new StringBuilder();
using (var sw = new StringWriter(sb))
{
foreach (var t in Tests)
{
var results = t.ComputeTests();
sw.Write(t.FormatInfoString());
}
}
if (sb.Length > 0)
ch.Info(sb.ToString());
}
}
        /// <summary>Traces host name, command line, GC server mode, and the full argument set.</summary>
        protected virtual void PrintPrologInfo(IChannel ch)
        {
            Contracts.AssertValue(ch);
            ch.Trace("Host = {0}", Environment.MachineName);
            ch.Trace("CommandLine = {0}", CmdParser.GetSettings(Host, Args, new TArgs()));
            ch.Trace("GCSettings.IsServerGC = {0}", System.Runtime.GCSettings.IsServerGC);
            ch.Trace("{0}", Args);
        }

        /// <summary>
        /// Returns the score tracker for a known dataset (train, validation, or one of
        /// the test sets), obtaining it from the optimization algorithm. Throws when
        /// the dataset is not one of the known sets.
        /// </summary>
        protected ScoreTracker ConstructScoreTracker(Dataset set)
        {
            // If not found construct one
            ScoreTracker st = null;
            if (set == TrainSet)
                st = OptimizationAlgorithm.GetScoreTracker("train", TrainSet, InitTrainScores);
            else if (set == ValidSet)
                st = OptimizationAlgorithm.GetScoreTracker("valid", ValidSet, InitValidScores);
            else
            {
                for (int t = 0; t < TestSets.Length; ++t)
                {
                    if (set == TestSets[t])
                    {
                        double[] initTestScores = InitTestScores?[t];
                        st = OptimizationAlgorithm.GetScoreTracker(string.Format("test[{0}]", t), TestSets[t], initTestScores);
                    }
                }
            }
            Contracts.Check(st != null, "unknown dataset passed to ConstructScoreTracker");
            return st;
        }
private double[] ComputeScoresSmart(IChannel ch, Dataset set)
{
if (!Args.CompressEnsemble)
{
foreach (var st in OptimizationAlgorithm.TrackedScores)
if (st.Dataset == set)
{
ch.Trace("Computing scores fast");
return st.Scores;
}
}
return ComputeScoresSlow(ch, set);
}
        /// <summary>
        /// Computes scores by evaluating the full ensemble over the dataset and adding
        /// the dataset's initial scores, when present.
        /// </summary>
        private double[] ComputeScoresSlow(IChannel ch, Dataset set)
        {
            ch.Trace("Computing scores slow");
            double[] scores = new double[set.NumDocs];
            Ensemble.GetOutputs(set, scores);
            double[] initScores = GetInitScores(set);
            if (initScores != null)
            {
                Contracts.Check(scores.Length == initScores.Length, "Length of initscores and scores mismatch");
                for (int i = 0; i < scores.Length; i++)
                    scores[i] += initScores[i];
            }
            return scores;
        }

        /// <summary>
        /// Maps a known dataset to its initial scores (null when none were supplied);
        /// throws for an unrecognized dataset.
        /// </summary>
        private double[] GetInitScores(Dataset set)
        {
            if (set == TrainSet)
                return InitTrainScores;
            if (set == ValidSet)
                return InitValidScores;
            for (int i = 0; TestSets != null && i < TestSets.Length; i++)
            {
                if (set == TestSets[i])
                    return InitTestScores?[i];
            }
            throw Contracts.Except("Queried for unknown set");
        }
}
    /// <summary>
    /// Base for converters that transform role-mapped input data into the binned
    /// dataset representation consumed by FastTree training.
    /// </summary>
    internal abstract class DataConverter
    {
        // Number of slots in the feature vector column of the input data.
        protected readonly int NumFeatures;
        public abstract int NumExamples { get; }
        // Maximum label value; PositiveInfinity means no bound (see UsingMaxLabel).
        protected readonly Float MaxLabel;
        protected readonly PredictionKind PredictionKind;

        /// <summary>
        /// The per-feature bin upper bounds. Implementations may differ on when all of the items
        /// in this array are initialized to non-null values but it must happen at least no later
        /// than immediately after we return from <see cref="GetDataset"/>.
        /// </summary>
        public readonly Double[][] BinUpperBounds;

        /// <summary>
        /// In the event that any features are filtered, this will contain the feature map, where
        /// the indices are the indices of features within the dataset, and the tree as we are
        /// learning, and the values are the indices of the features within the original input
        /// data. This array is used to "rehydrate" the tree once we finish training, so that the
        /// feature indices are once again over the full set of features, as opposed to the subset
        /// of features we actually trained on. This can be null in the event that no filtering
        /// occurred.
        /// </summary>
        /// <seealso cref="Ensemble.RemapFeatures"/>
        public int[] FeatureMap;

        protected readonly IHost Host;
        // Indices of categorical features in the input schema; may be null.
        protected readonly int[] CategoricalFeatureIndices;
        // Whether categorical splits are enabled for tree learning.
        protected readonly bool CategoricalSplit;

        // True when a finite maximum label was supplied.
        protected bool UsingMaxLabel
        {
            get { return MaxLabel != Float.PositiveInfinity; }
        }
        /// <summary>
        /// Validates the input data (float feature vector, optional weight and group
        /// columns) and captures the shared conversion state. When
        /// <paramref name="binUpperBounds"/> is supplied it must have one non-null
        /// entry per feature; otherwise an empty per-feature array is allocated for
        /// derived classes to fill.
        /// </summary>
        private DataConverter(RoleMappedData data, IHost host, Double[][] binUpperBounds, Float maxLabel,
            PredictionKind kind, int[] categoricalFeatureIndices, bool categoricalSplit)
        {
            Contracts.AssertValue(host, "host");
            Host = host;
            Host.CheckValue(data, nameof(data));
            data.CheckFeatureFloatVector();
            data.CheckOptFloatWeight();
            data.CheckOptGroup();
            NumFeatures = data.Schema.Feature.Type.VectorSize;
            if (binUpperBounds != null)
            {
                Host.AssertValue(binUpperBounds);
                Host.Assert(Utils.Size(binUpperBounds) == NumFeatures);
                Host.Assert(binUpperBounds.All(b => b != null));
                BinUpperBounds = binUpperBounds;
            }
            else
                BinUpperBounds = new Double[NumFeatures][];
            MaxLabel = maxLabel;
            PredictionKind = kind;
            CategoricalSplit = categoricalSplit;
            CategoricalFeatureIndices = categoricalFeatureIndices;
        }
        /// <summary>
        /// Creates a converter that will find bins itself, choosing the in-memory
        /// implementation or the disk-transposed one per <paramref name="diskTranspose"/>.
        /// </summary>
        public static DataConverter Create(RoleMappedData data, IHost host, int maxBins,
            Float maxLabel, bool diskTranspose, bool noFlocks, int minDocsPerLeaf, PredictionKind kind,
            IParallelTraining parallelTraining, int[] categoricalFeatureIndices, bool categoricalSplit)
        {
            Contracts.AssertValue(host, "host");
            host.AssertValue(data);
            host.Assert(maxBins > 0);
            DataConverter conv;
            using (var ch = host.Start("CreateConverter"))
            {
                if (!diskTranspose)
                    conv = new MemImpl(data, host, maxBins, maxLabel, noFlocks, minDocsPerLeaf, kind,
                        parallelTraining, categoricalFeatureIndices, categoricalSplit);
                else
                    conv = new DiskImpl(data, host, maxBins, maxLabel, kind, parallelTraining, categoricalFeatureIndices, categoricalSplit);
            }
            return conv;
        }

        /// <summary>
        /// Creates a converter that reuses previously computed bin upper bounds,
        /// again selecting the in-memory or disk-transposed implementation.
        /// </summary>
        public static DataConverter Create(RoleMappedData data, IHost host, Double[][] binUpperBounds,
            Float maxLabel, bool diskTranspose, bool noFlocks, PredictionKind kind, int[] categoricalFeatureIndices, bool categoricalSplit)
        {
            Contracts.AssertValue(host, "host");
            host.AssertValue(data);
            DataConverter conv;
            using (var ch = host.Start("CreateConverter"))
            {
                if (!diskTranspose)
                    conv = new MemImpl(data, host, binUpperBounds, maxLabel, noFlocks, kind, categoricalFeatureIndices, categoricalSplit);
                else
                    conv = new DiskImpl(data, host, binUpperBounds, maxLabel, kind, categoricalFeatureIndices, categoricalSplit);
            }
            return conv;
        }
        /// <summary>
        /// Fetches the slot names of the feature column into <paramref name="names"/>,
        /// or an empty buffer of matching length when the schema has none.
        /// </summary>
        protected void GetFeatureNames(RoleMappedData data, ref VBuffer<ReadOnlyMemory<char>> names)
        {
            // The existing implementations will have verified this by the time this utility
            // function is called.
            Host.AssertValue(data);
            var feat = data.Schema.Feature;
            Host.AssertValue(feat);
            Host.Assert(feat.Type.ValueCount > 0);

            var sch = data.Schema.Schema;
            if (sch.HasSlotNames(feat.Index, feat.Type.ValueCount))
                sch.GetMetadata(MetadataUtils.Kinds.SlotNames, feat.Index, ref names);
            else
                names = new VBuffer<ReadOnlyMemory<char>>(feat.Type.ValueCount, 0, names.Values, names.Indices);
        }

#if !CORECLR
        /// <summary>
        /// Fetches per-slot INI content metadata for the feature column into
        /// <paramref name="content"/>, or an empty buffer when the metadata is absent
        /// or not a text vector of the expected length. (Desktop-framework builds only.)
        /// </summary>
        protected void GetFeatureIniContent(RoleMappedData data, ref VBuffer<ReadOnlyMemory<char>> content)
        {
            // The existing implementations will have verified this by the time this utility
            // function is called.
            Host.AssertValue(data);
            var feat = data.Schema.Feature;
            Host.AssertValue(feat);
            Host.Assert(feat.Type.ValueCount > 0);

            var sch = data.Schema.Schema;
            var type = sch.GetMetadataTypeOrNull(BingBinLoader.IniContentMetadataKind, feat.Index);
            if (type == null || type.VectorSize != feat.Type.ValueCount || !type.IsVector || !type.ItemType.IsText)
                content = new VBuffer<ReadOnlyMemory<char>>(feat.Type.ValueCount, 0, content.Values, content.Indices);
            else
                sch.GetMetadata(BingBinLoader.IniContentMetadataKind, feat.Index, ref content);
        }
#endif
        /// <summary>Produces the binned dataset from the input data.</summary>
        public abstract Dataset GetDataset();

        /// <summary>
        /// Bins an input vector of feature values.
        /// </summary>
        /// <param name="binFinder">The instance of the bin finder to use</param>
        /// <param name="values">The values for one particular feature value across all examples</param>
        /// <param name="maxBins">The maximum number of bins to find</param>
        /// <param name="minDocsPerLeaf">The minimum number of documents per leaf, forwarded to the bin finder</param>
        /// <param name="distinctValues">The working array of distinct values, a temporary buffer that should be passed
        /// to multiple invocations of this method, but not meant to be useful to the caller. This method will reallocate
        /// the array to a new size if necessary. Passing in null at first is acceptable.</param>
        /// <param name="distinctCounts">Similar working array, but for distinct counts</param>
        /// <param name="upperBounds">The bin upper bounds, maximum length will be <paramref name="maxBins"/></param>
        /// <returns>Whether finding the bins was successful or not. It will be unsuccessful iff <paramref name="values"/>
        /// has any missing values. In that event, the out parameters will be left as null.</returns>
        protected static bool CalculateBins(BinFinder binFinder, ref VBuffer<double> values, int maxBins, int minDocsPerLeaf,
            ref double[] distinctValues, ref int[] distinctCounts, out double[] upperBounds)
        {
            // NOTE(review): distinctValues/distinctCounts are not used by this
            // implementation — presumably retained for signature compatibility; verify.
            return binFinder.FindBins(ref values, maxBins, minDocsPerLeaf, out upperBounds);
        }
        /// <summary>
        /// Enumerates (index, bin) pairs for the explicit entries of a sparse vector
        /// whose values bin to something other than bin 0. The assertion checks that
        /// the vector's implicit zeros would themselves fall in bin 0.
        /// </summary>
        private static IEnumerable<KeyValuePair<int, int>> NonZeroBinnedValuesForSparse(VBuffer<double> values, Double[] binUpperBounds)
        {
            Contracts.Assert(!values.IsDense);
            Contracts.Assert(Algorithms.FindFirstGE(binUpperBounds, 0) == 0);
            for (int i = 0; i < values.Count; ++i)
            {
                int ge = Algorithms.FindFirstGE(binUpperBounds, values.Values[i]);
                if (ge != 0)
                    yield return new KeyValuePair<int, int>(values.Indices[i], ge);
            }
        }
        /// <summary>
        /// Builds a feature flock from a group of features of which at most one is
        /// "hot" (non-zero-binned) per example. A single feature becomes a singleton
        /// flock; otherwise each example is encoded into one shared bin space, with
        /// position 0 reserved for "all cold", and packed into a dense or sparse
        /// IntArray depending on how many examples are hot.
        /// </summary>
        private FeatureFlockBase CreateOneHotFlock(IChannel ch,
            List<int> features, int[] binnedValues, int[] lastOn, ValuesList[] instanceList,
            ref int[] forwardIndexerWork, ref VBuffer<double> temp, bool categorical)
        {
            Contracts.AssertValue(ch);
            ch.Assert(0 <= features.Min() && features.Max() < NumFeatures);
            ch.Assert(features.Count > 0);
            if (features.Count == 1)
            {
                // Singleton.
                int fi = features[0];
                var values = instanceList[fi];
                values.CopyTo(NumExamples, ref temp);
                return CreateSingletonFlock(ch, ref temp, binnedValues, BinUpperBounds[fi]);
            }
            // Multiple, one hot.
            int[] hotFeatureStarts = new int[features.Count + 1];
            // The position 0 is reserved as the "cold" position for all features in the slot.
            // This corresponds to all features being in their first bin (for example, cold). So the
            // first feature's "hotness" starts at 1. HOWEVER, for the purpose of defining the
            // bins, we start with this array computed off by one. Once we define the bins, we
            // will correct it.
            hotFeatureStarts[0] = 0;
            // There are as many hot positions per feature as there are number of bin upper
            // bounds, minus 1. (The first bin is the "cold" position.)
            for (int i = 1; i < hotFeatureStarts.Length; ++i)
                hotFeatureStarts[i] = hotFeatureStarts[i - 1] + BinUpperBounds[features[i - 1]].Length - 1;
            IntArrayBits flockBits = IntArray.NumBitsNeeded(hotFeatureStarts[hotFeatureStarts.Length - 1] + 1);
            int min = features[0];
            int lim = features[features.Count - 1] + 1;
            var ind = new ValuesList.ForwardIndexer(instanceList, features.ToArray(), ref forwardIndexerWork);
            // f2sf maps a feature index (offset by min) to its slot within this flock; -1 = not in flock.
            int[] f2sf = Utils.CreateArray(lim - min, -1);
            for (int i = 0; i < features.Count; ++i)
                f2sf[features[i] - min] = i;
            int hotCount = 0;
            for (int i = 0; i < lastOn.Length; ++i)
            {
                int fi = lastOn[i];
                if (fi < min || fi >= lim)
                {
                    // All of the features would bin to 0, so we're in the "cold" position.
                    binnedValues[i] = 0;
#if false // This would be a very nice test to have, but for some situations it's too slow, even for debug builds. Consider reactivating temporarily if actively working on flocks.
                    // Assert that all the features really would be cold for this position.
                    Contracts.Assert(Enumerable.Range(min, lim - min).All(f => ind[f, i] < BinUpperBounds[f][0]));
#endif
                    continue;
                }
                ch.Assert(min <= fi && fi < lim);
                int subfeature = f2sf[fi - min];
                ch.Assert(subfeature >= 0);
                Double val = ind[subfeature, i];
#if false // Same note, too slow even for debug builds.
                // Assert that all the other features really would be cold for this position.
                Contracts.Assert(Enumerable.Range(min, fi - min).Concat(Enumerable.Range(fi + 1, lim - (fi + 1))).All(f => ind[f, i] < BinUpperBounds[f][0]));
#endif
                Double[] bub = BinUpperBounds[fi];
                ch.Assert(bub.Length > 1);
                int bin = Algorithms.FindFirstGE(bub, val);
                ch.Assert(0 < bin && bin < bub.Length); // If 0, should not have been considered "on", so what the heck?
                binnedValues[i] = hotFeatureStarts[subfeature] + bin;
                hotCount++;
            }
#if DEBUG
            int limBin = (1 << (int)flockBits);
            Contracts.Assert(flockBits == IntArrayBits.Bits32 || binnedValues.All(b => b < limBin));
#endif
            // Correct the hot feature starts now that we're done binning.
            for (int f = 0; f < hotFeatureStarts.Length; ++f)
                hotFeatureStarts[f]++;
            // Construct the int array of binned values.
            const double sparsifyThreshold = 0.7;
            IntArrayType type = hotCount < (1 - sparsifyThreshold) * NumExamples
                ? IntArrayType.Sparse
                : IntArrayType.Dense;
            IntArray bins = IntArray.New(NumExamples, type, flockBits, binnedValues);
            var bups = features.Select(fi => BinUpperBounds[fi]).ToArray(features.Count);
            return new OneHotFeatureFlock(bins, hotFeatureStarts, bups, categorical);
        }
        /// <summary>
        /// Builds a one-hot flock for categorical features. Unlike
        /// CreateOneHotFlock, this variant assumes every feature has exactly two bin
        /// bounds (a binary indicator) and skips per-value binning: a "hot" example is
        /// assigned the single hot position of its last-on feature directly.
        /// </summary>
        private FeatureFlockBase CreateOneHotFlockCategorical(IChannel ch,
            List<int> features, int[] binnedValues, int[] lastOn, bool categorical)
        {
            Contracts.AssertValue(ch);
            ch.Assert(0 <= features.Min() && features.Max() < NumFeatures);
            ch.Assert(features.Count > 1);
            // Multiple, one hot.
            int[] hotFeatureStarts = new int[features.Count + 1];
            // The position 0 is reserved as the "cold" position for all features in the slot.
            // This corresponds to all features being in their first bin (for example, cold). So the
            // first feature's "hotness" starts at 1. HOWEVER, for the purpose of defining the
            // bins, we start with this array computed off by one. Once we define the bins, we
            // will correct it.
            hotFeatureStarts[0] = 0;
            // There are as many hot positions per feature as there are number of bin upper
            // bounds, minus 1. (The first bin is the "cold" position.)
            for (int i = 1; i < hotFeatureStarts.Length; ++i)
                hotFeatureStarts[i] = hotFeatureStarts[i - 1] + BinUpperBounds[features[i - 1]].Length - 1;
            IntArrayBits flockBits = IntArray.NumBitsNeeded(hotFeatureStarts[hotFeatureStarts.Length - 1] + 1);
            int min = features[0];
            int lim = features[features.Count - 1] + 1;
            // f2sf maps a feature index (offset by min) to its slot within this flock; -1 = not in flock.
            int[] f2sf = Utils.CreateArray(lim - min, -1);
            for (int i = 0; i < features.Count; ++i)
                f2sf[features[i] - min] = i;
            int hotCount = 0;
            for (int i = 0; i < lastOn.Length; ++i)
            {
                int fi = lastOn[i];
                if (fi < min || fi >= lim)
                {
                    // All of the features would bin to 0, so we're in the "cold" position.
                    binnedValues[i] = 0;
#if false // This would be a very nice test to have, but for some situations it's too slow, even for debug builds. Consider reactivating temporarily if actively working on flocks.
                    // Assert that all the features really would be cold for this position.
                    Contracts.Assert(Enumerable.Range(min, lim - min).All(f => ind[f, i] < BinUpperBounds[f][0]));
#endif
                    continue;
                }
                ch.Assert(min <= fi && fi < lim);
                int subfeature = f2sf[fi - min];
                ch.Assert(subfeature >= 0);
#if false // Same note, too slow even for debug builds.
                // Assert that all the other features really would be cold for this position.
                Contracts.Assert(Enumerable.Range(min, fi - min).Concat(Enumerable.Range(fi + 1, lim - (fi + 1))).All(f => ind[f, i] < BinUpperBounds[f][0]));
#endif
                Double[] bub = BinUpperBounds[fi];
                ch.Assert(bub.Length == 2);
                //REVIEW: leaving out the check of the actual value to reduce memory consumption, going with a
                //leap of faith based on what the user told us.
                binnedValues[i] = hotFeatureStarts[subfeature] + 1;
                hotCount++;
            }
#if DEBUG
            int limBin = (1 << (int)flockBits);
            Contracts.Assert(flockBits == IntArrayBits.Bits32 || binnedValues.All(b => b < limBin));
#endif
            // Correct the hot feature starts now that we're done binning.
            for (int f = 0; f < hotFeatureStarts.Length; ++f)
                hotFeatureStarts[f]++;
            // Construct the int array of binned values.
            const double sparsifyThreshold = 0.7;
            IntArrayType type = hotCount < (1 - sparsifyThreshold) * NumExamples
                ? IntArrayType.Sparse
                : IntArrayType.Dense;
            IntArray bins = IntArray.New(NumExamples, type, flockBits, binnedValues);
            var bups = features.Select(fi => BinUpperBounds[fi]).ToArray(features.Count);
            return new OneHotFeatureFlock(bins, hotFeatureStarts, bups, categorical);
        }
/// <summary>
/// Create a new feature flock with a given name, values and specified bin bounds.
/// </summary>
/// <param name="ch">Channel used for assertions.</param>
/// <param name="values">The values for this feature, that will be binned.</param>
/// <param name="binnedValues">A working array of length equal to the length of the input feature vector</param>
/// <param name="binUpperBounds">The upper bounds of the binning of this feature.</param>
/// <returns>A derived binned derived feature vector.</returns>
protected static SingletonFeatureFlock CreateSingletonFlock(IChannel ch, ref VBuffer<double> values, int[] binnedValues,
    Double[] binUpperBounds)
{
    Contracts.AssertValue(ch);
    ch.Assert(Utils.Size(binUpperBounds) > 0);
    ch.AssertValue(binnedValues);
    ch.Assert(binnedValues.Length == values.Length);
    // TODO: Consider trying to speed up FindFirstGE by making a "map" like is done in the fastrank code
    // TODO: Cache binnedValues
    // Bin index a zero value maps to; implicit (sparse) zeros land in this bin.
    int zeroBin = Algorithms.FindFirstGE(binUpperBounds, 0);
    // TODO: Make this a settable parameter / use the sparsifyThreshold already in the parameters
    const double sparsifyThreshold = 0.7;
    IntArray bins = null;
    var numBitsNeeded = IntArray.NumBitsNeeded(binUpperBounds.Length);
    if (numBitsNeeded == IntArrayBits.Bits0)
        bins = new Dense0BitIntArray(values.Length);
    else if (!values.IsDense && zeroBin == 0 && values.Count < (1 - sparsifyThreshold) * values.Length)
    {
        // Special code to go straight from our own sparse format to a sparse IntArray.
        // Note: requires zeroBin to be 0 because that's what's assumed in FastTree code
        var nonZeroValues = NonZeroBinnedValuesForSparse(values, binUpperBounds);
        bins = new DeltaSparseIntArray(values.Length, numBitsNeeded, nonZeroValues);
    }
    else
    {
        // Fill the binnedValues array and convert using normal IntArray code
        int firstBinCount = 0;
        if (!values.IsDense)
        {
            // Pre-fill with the bin of the implicit zeros, then overwrite explicit entries.
            if (zeroBin != 0)
            {
                for (int i = 0; i < values.Length; i++)
                    binnedValues[i] = zeroBin;
            }
            else
                Array.Clear(binnedValues, 0, values.Length);
            for (int i = 0; i < values.Count; ++i)
            {
                if ((binnedValues[values.Indices[i]] = Algorithms.FindFirstGE(binUpperBounds, values.Values[i])) == 0)
                    firstBinCount++;
            }
            // Implicit zeros also count toward the first bin when zeroBin == 0.
            if (zeroBin == 0)
                firstBinCount += values.Length - values.Count;
        }
        else
        {
            Double[] denseValues = values.Values;
            for (int i = 0; i < values.Length; i++)
            {
                if (denseValues[i] == 0)
                    binnedValues[i] = zeroBin;
                else
                    binnedValues[i] = Algorithms.FindFirstGE(binUpperBounds, denseValues[i]);
                if (binnedValues[i] == 0)
                    firstBinCount++;
            }
        }
        // This sparsity check came from the FastRank code.
        double firstBinFrac = (double)firstBinCount / binnedValues.Length;
        IntArrayType arrayType = firstBinFrac > sparsifyThreshold ? IntArrayType.Sparse : IntArrayType.Dense;
        bins = IntArray.New(values.Length, arrayType, IntArray.NumBitsNeeded(binUpperBounds.Length), binnedValues);
    }
    return new SingletonFeatureFlock(bins, binUpperBounds);
}
private sealed class DiskImpl : DataConverter
{
// Number of rows kept (rows with missing feature values are dropped during Construct).
private readonly int _numExamples;
// The dataset materialized once in the constructor by Construct.
private readonly Dataset _dataset;
public override int NumExamples { get { return _numExamples; } }
// Constructor used when bin upper bounds must be computed from the data itself
// (base receives null bounds; Construct builds them with at most maxBins bins,
// coordinating across workers through parallelTraining).
public DiskImpl(RoleMappedData data, IHost host, int maxBins, Float maxLabel, PredictionKind kind,
    IParallelTraining parallelTraining, int[] categoricalFeatureIndices, bool categoricalSplit)
    : base(data, host, null, maxLabel, kind, categoricalFeatureIndices, categoricalSplit)
{
    // use parallel training for training data
    Host.AssertValue(parallelTraining);
    _dataset = Construct(data, ref _numExamples, maxBins, parallelTraining);
}
// Constructor used when the bin upper bounds are already known and supplied by the
// caller; Construct is invoked with no max-bin budget and no parallel coordination.
public DiskImpl(RoleMappedData data, IHost host,
    double[][] binUpperBounds, Float maxLabel, PredictionKind kind, int[] categoricalFeatureIndices, bool categoricalSplit)
    : base(data, host, binUpperBounds, maxLabel, kind, categoricalFeatureIndices, categoricalSplit)
{
    _dataset = Construct(data, ref _numExamples, -1, null);
}
/// <summary>Returns the dataset that was materialized by the constructor.</summary>
public override Dataset GetDataset() => _dataset;
/// <summary>
/// Registers the column's index for transposition and returns it, or returns -1 when
/// there is no such column.
/// </summary>
private static int AddColumnIfNeeded(ColumnInfo info, List<int> toTranspose)
{
    if (info == null)
        return -1;
    // It is entirely possible that a single column could have two roles,
    // and so be added twice, but this case is handled by the transposer.
    int index = info.Index;
    toTranspose.Add(index);
    return index;
}
/// <summary>
/// Returns a mapper that copies a <see cref="VBuffer{T1}"/> into a <see cref="VBuffer{T2}"/>,
/// converting each value with the standard conversion between the two item types.
/// When the conversion is the identity, a direct buffer copy is returned instead.
/// The returned mapper reuses the destination's existing arrays when large enough.
/// </summary>
private ValueMapper<VBuffer<T1>, VBuffer<T2>> GetCopier<T1, T2>(ColumnType itemType1, ColumnType itemType2)
{
    var conv = Conversions.Instance.GetStandardConversion<T1, T2>(itemType1, itemType2, out bool identity);
    if (identity)
    {
        // T1 and T2 are the same representation; the double cast through object is how
        // the delegate is retyped without an actual conversion.
        ValueMapper<VBuffer<T1>, VBuffer<T1>> identityResult =
            (ref VBuffer<T1> src, ref VBuffer<T1> dst) => src.CopyTo(ref dst);
        return (ValueMapper<VBuffer<T1>, VBuffer<T2>>)(object)identityResult;
    }
    return
        (ref VBuffer<T1> src, ref VBuffer<T2> dst) =>
        {
            var indices = dst.Indices;
            var values = dst.Values;
            if (src.Count > 0)
            {
                if (!src.IsDense)
                {
                    Utils.EnsureSize(ref indices, src.Count);
                    Array.Copy(src.Indices, indices, src.Count);
                }
                Utils.EnsureSize(ref values, src.Count);
                for (int i = 0; i < src.Count; ++i)
                    conv(ref src.Values[i], ref values[i]);
            }
            dst = new VBuffer<T2>(src.Length, src.Count, values, indices);
        };
}
/// <summary>
/// Converts the role-mapped input into a column-major <see cref="Dataset"/>:
/// label/group columns are converted, the data is transposed to disk, bin upper bounds
/// are computed where not already supplied (with rows containing missing feature values
/// detected and dropped via a <see cref="SlotDropper"/>), features are binned into flocks,
/// and labels plus query boundaries are assembled into the dataset skeleton.
/// </summary>
/// <param name="examples">The input data with its role mappings.</param>
/// <param name="numExamples">Receives the number of rows kept.</param>
/// <param name="maxBins">Maximum bins per feature when bounds must be computed.</param>
/// <param name="parallelTraining">Optional coordinator for distributed bin construction;
/// when null, <c>BinUpperBounds</c> must already be populated.</param>
private Dataset Construct(RoleMappedData examples, ref int numExamples, int maxBins, IParallelTraining parallelTraining)
{
    Host.AssertValue(examples);
    Host.AssertValue(examples.Schema.Feature);
    Host.AssertValueOrNull(examples.Schema.Label);
    Host.AssertValueOrNull(examples.Schema.Group);
    Host.AssertValueOrNull(examples.Schema.Weight);
    if (parallelTraining == null)
        Host.AssertValue(BinUpperBounds);
    Dataset result;
    using (var ch = Host.Start("Conversion"))
    {
        // Add a missing value filter on the features.
        // REVIEW: Possibly filter out missing labels, but we don't do this in current FastTree conversion.
        //var missingArgs = new MissingValueFilter.Arguments();
        //missingArgs.column = new string[] { examples.Schema.Feature.Name };
        //IDataView data = new MissingValueFilter(missingArgs, Host, examples.Data);
        IDataView data = examples.Data;
        // Convert the label column, if one exists.
        var labelName = examples.Schema.Label?.Name;
        if (labelName != null)
        {
            var convArgs = new LabelConvertTransform.Arguments();
            var convCol = new LabelConvertTransform.Column() { Name = labelName, Source = labelName };
            convArgs.Column = new LabelConvertTransform.Column[] { convCol };
            data = new LabelConvertTransform(Host, convArgs, data);
        }
        // Convert the group column, if one exists.
        if (examples.Schema.Group != null)
        {
            var convArgs = new ConvertTransform.Arguments();
            var convCol = new ConvertTransform.Column
            {
                ResultType = DataKind.U8
            };
            convCol.Name = convCol.Source = examples.Schema.Group.Name;
            convArgs.Column = new ConvertTransform.Column[] { convCol };
            data = new ConvertTransform(Host, convArgs, data);
        }
        // Since we've passed it through a few transforms, reconstitute the mapping on the
        // newly transformed data.
        examples = new RoleMappedData(data, examples.Schema.GetColumnRoleNames());
        // Get the index of the columns in the transposed view, while we're at it composing
        // the list of the columns we want to transpose.
        var toTranspose = new List<int>();
        int featIdx = AddColumnIfNeeded(examples.Schema.Feature, toTranspose);
        int labelIdx = AddColumnIfNeeded(examples.Schema.Label, toTranspose);
        int groupIdx = AddColumnIfNeeded(examples.Schema.Group, toTranspose);
        int weightIdx = AddColumnIfNeeded(examples.Schema.Weight, toTranspose);
        Host.Assert(1 <= toTranspose.Count && toTranspose.Count <= 4);
        ch.Info("Changing data from row-wise to column-wise on disk");
        // Note that if these columns are already transposed, then this will be a no-op.
        using (Transposer trans = Transposer.Create(Host, data, false, toTranspose.ToArray()))
        {
            VBuffer<float> temp = default(VBuffer<float>);
            // Construct the derived features.
            var features = new FeatureFlockBase[NumFeatures];
            BinFinder finder = new BinFinder();
            FeaturesToContentMap fmap = new FeaturesToContentMap(examples.Schema);
            var hasMissingPred = Conversions.Instance.GetHasMissingPredicate<Float>(trans.TransposeSchema.GetSlotType(featIdx));
            // There is no good mechanism to filter out rows with missing feature values on transposed data.
            // So, we instead perform one featurization pass which, if successful, will remain one pass but,
            // if we ever encounter missing values will become a "detect missing features" pass, which will
            // in turn inform a necessary featurization pass secondary
            SlotDropper slotDropper = null;
            bool[] localConstructBinFeatures = Utils.CreateArray<bool>(NumFeatures, true);
            if (parallelTraining != null)
                localConstructBinFeatures = parallelTraining.GetLocalBinConstructionFeatures(NumFeatures);
            using (var pch = Host.StartProgressChannel("FastTree disk-based bins initialization"))
            {
                // Loop until a complete pass over the features encounters no missing values.
                // On detecting a missing value, the bounds built so far are discarded and a
                // SlotDropper describing the bad rows is constructed for the next pass.
                for (; ; )
                {
                    bool hasMissing = false;
                    using (var cursor = trans.GetSlotCursor(featIdx))
                    {
                        HashSet<int> constructed = new HashSet<int>();
                        var getter = SubsetGetter(cursor.GetGetter<Float>(), slotDropper);
                        numExamples = slotDropper?.DstLength ?? trans.RowCount;
                        // Perhaps we should change the binning to just work over singles.
                        VBuffer<double> doubleTemp = default(VBuffer<double>);
                        double[] distinctValues = null;
                        int[] distinctCounts = null;
                        var copier = GetCopier<Float, Double>(NumberType.Float, NumberType.R8);
                        int iFeature = 0;
                        pch.SetHeader(new ProgressHeader("features"), e => e.SetProgress(0, iFeature, features.Length));
                        while (cursor.MoveNext())
                        {
                            iFeature = checked((int)cursor.Position);
                            if (!localConstructBinFeatures[iFeature])
                                continue;
                            Host.Assert(iFeature < features.Length);
                            Host.Assert(features[iFeature] == null);
                            getter(ref temp);
                            Host.Assert(temp.Length == numExamples);
                            // First get the bin bounds, constructing them if they do not exist.
                            if (BinUpperBounds[iFeature] == null)
                            {
                                constructed.Add(iFeature);
                                ch.Assert(maxBins > 0);
                                finder = finder ?? new BinFinder();
                                // Must copy over, as bin calculation is potentially destructive.
                                copier(ref temp, ref doubleTemp);
                                hasMissing = !CalculateBins(finder, ref doubleTemp, maxBins, 0,
                                    ref distinctValues, ref distinctCounts,
                                    out BinUpperBounds[iFeature]);
                            }
                            else
                                hasMissing = hasMissingPred(ref temp);
                            if (hasMissing)
                            {
                                // Let's just be a little extra safe, since it's so easy to check and the results if there
                                // is a bug in the upstream pipeline would be very severe.
                                ch.Check(slotDropper == null,
                                    "Multiple passes over the data seem to be producing different data. There is a bug in the upstream pipeline.");
                                // Destroy any constructed bin upper bounds. We'll calculate them over the next pass.
                                foreach (var i in constructed)
                                    BinUpperBounds[i] = null;
                                // Determine what rows have missing values.
                                slotDropper = ConstructDropSlotRanges(cursor, getter, ref temp);
                                ch.Assert(slotDropper.DstLength < temp.Length);
                                ch.Warning("{0} of {1} examples will be skipped due to missing feature values",
                                    temp.Length - slotDropper.DstLength, temp.Length);
                                break;
                            }
                            Host.AssertValue(BinUpperBounds[iFeature]);
                        }
                    }
                    if (hasMissing == false)
                        break;
                }
                // Sync up global boundaries.
                if (parallelTraining != null)
                    parallelTraining.SyncGlobalBoundary(NumFeatures, maxBins, BinUpperBounds);
                List<FeatureFlockBase> flocks = new List<FeatureFlockBase>();
                using (var cursor = trans.GetSlotCursor(featIdx))
                using (var catCursor = trans.GetSlotCursor(featIdx))
                {
                    var getter = SubsetGetter(cursor.GetGetter<Float>(), slotDropper);
                    var catGetter = SubsetGetter(catCursor.GetGetter<Float>(), slotDropper);
                    numExamples = slotDropper?.DstLength ?? trans.RowCount;
                    // Perhaps we should change the binning to just work over singles.
                    VBuffer<double> doubleTemp = default(VBuffer<double>);
                    int[] binnedValues = new int[numExamples];
                    var copier = GetCopier<Float, Double>(NumberType.Float, NumberType.R8);
                    int iFeature = 0;
                    if (CategoricalSplit && CategoricalFeatureIndices != null)
                    {
                        // lastOn[row] tracks the last feature index seen "hot" for the row, used
                        // to verify that each categorical range really is one-hot.
                        int[] lastOn = new int[NumExamples];
                        for (int i = 0; i < lastOn.Length; ++i)
                            lastOn[i] = -1;
                        List<int> pending = new List<int>();
                        int catRangeIndex = 0;
                        for (iFeature = 0; iFeature < NumFeatures;)
                        {
                            if (catRangeIndex < CategoricalFeatureIndices.Length &&
                                CategoricalFeatureIndices[catRangeIndex] == iFeature)
                            {
                                pending.Clear();
                                bool oneHot = true;
                                for (int iFeatureLocal = iFeature;
                                    iFeatureLocal <= CategoricalFeatureIndices[catRangeIndex + 1];
                                    ++iFeatureLocal)
                                {
                                    Double[] bup = BinUpperBounds[iFeatureLocal];
                                    if (bup.Length == 1)
                                    {
                                        // This is a trivial feature. Skip it.
                                        continue;
                                    }
                                    Contracts.Assert(Utils.Size(bup) > 0);
                                    Double firstBin = bup[0];
                                    GetFeatureValues(catCursor, iFeatureLocal, catGetter, ref temp, ref doubleTemp, copier);
                                    bool add = false;
                                    for (int index = 0; index < doubleTemp.Count; ++index)
                                    {
                                        if (doubleTemp.Values[index] <= firstBin)
                                            continue;
                                        int iindex = doubleTemp.IsDense ? index : doubleTemp.Indices[index];
                                        int last = lastOn[iindex];
                                        // A non-indicator value, or two hot features on the same row,
                                        // disqualifies the whole range from one-hot treatment.
                                        if (doubleTemp.Values[index] != 1 || (last != -1 && last >= iFeature))
                                        {
                                            catRangeIndex += 2;
                                            pending.Clear();
                                            oneHot = false;
                                            break;
                                        }
                                        lastOn[iindex] = iFeatureLocal;
                                        add = true;
                                    }
                                    if (!oneHot)
                                        break;
                                    if (add)
                                        pending.Add(iFeatureLocal);
                                }
                                if (!oneHot)
                                    continue;
                                if (pending.Count > 0)
                                {
                                    flocks.Add(CreateOneHotFlockCategorical(ch, pending, binnedValues,
                                        lastOn, true));
                                }
                                iFeature = CategoricalFeatureIndices[catRangeIndex + 1] + 1;
                                catRangeIndex += 2;
                            }
                            else
                            {
                                GetFeatureValues(cursor, iFeature, getter, ref temp, ref doubleTemp, copier);
                                double[] upperBounds = BinUpperBounds[iFeature++];
                                Host.AssertValue(upperBounds);
                                if (upperBounds.Length == 1)
                                    continue; //trivial feature, skip it.
                                flocks.Add(CreateSingletonFlock(ch, ref doubleTemp, binnedValues, upperBounds));
                            }
                        }
                    }
                    else
                    {
                        for (int i = 0; i < NumFeatures; i++)
                        {
                            GetFeatureValues(cursor, i, getter, ref temp, ref doubleTemp, copier);
                            double[] upperBounds = BinUpperBounds[i];
                            Host.AssertValue(upperBounds);
                            if (upperBounds.Length == 1)
                                continue; //trivial feature, skip it.
                            flocks.Add(CreateSingletonFlock(ch, ref doubleTemp, binnedValues, upperBounds));
                        }
                    }
                    Contracts.Assert(FeatureMap == null);
                    FeatureMap = Enumerable.Range(0, NumFeatures).Where(f => BinUpperBounds[f].Length > 1).ToArray();
                    features = flocks.ToArray();
                }
            }
            // Construct the labels.
            short[] ratings = new short[numExamples];
            Double[] actualLabels = new Double[numExamples];
            if (labelIdx >= 0)
            {
                trans.GetSingleSlotValue<Float>(labelIdx, ref temp);
                slotDropper?.DropSlots(ref temp, ref temp);
                for (int i = 0; i < temp.Count; ++i)
                {
                    int ii = temp.IsDense ? i : temp.Indices[i];
                    var label = temp.Values[i];
                    if (UsingMaxLabel && !(0 <= label && label <= MaxLabel))
                        throw Host.Except("Found invalid label {0}. Value should be between 0 and {1}, inclusive.", label, MaxLabel);
                    ratings[ii] = (short)label;
                    actualLabels[ii] = (Double)label;
                }
            }
            // Construct the boundaries and query IDs.
            int[] boundaries;
            ulong[] qids;
            if (PredictionKind == PredictionKind.Ranking)
            {
                if (groupIdx < 0)
                    throw ch.Except("You need to provide {0} column for Ranking problem", DefaultColumnNames.GroupId);
                VBuffer<ulong> groupIds = default(VBuffer<ulong>);
                trans.GetSingleSlotValue<ulong>(groupIdx, ref groupIds);
                slotDropper?.DropSlots(ref groupIds, ref groupIds);
                ConstructBoundariesAndQueryIds(ref groupIds, out boundaries, out qids);
            }
            else
            {
                if (groupIdx >= 0)
                    ch.Warning("This is not ranking problem, Group Id '{0}' column will be ignored", examples.Schema.Group.Name);
                // Non-ranking problems get synthetic queries in fixed-size chunks.
                const int queryChunkSize = 100;
                qids = new ulong[(numExamples - 1) / queryChunkSize + 1];
                boundaries = new int[qids.Length + 1];
                for (int i = 0; i < qids.Length; ++i)
                {
                    qids[i] = (ulong)i;
                    boundaries[i + 1] = boundaries[i] + queryChunkSize;
                }
                boundaries[boundaries.Length - 1] = numExamples;
            }
            // Construct the doc IDs. Doesn't really matter what these are.
            ulong[] dids = Enumerable.Range(0, numExamples).Select(d => (ulong)d).ToArray(numExamples);
            var skeleton = new Dataset.DatasetSkeleton(ratings, boundaries, qids, dids, new double[0][], actualLabels);
            Host.Assert(features.All(f => f != null));
            result = new Dataset(skeleton, features);
        }
    }
    return result;
}
/// <summary>
/// Advances <paramref name="cursor"/> until it sits on slot <paramref name="iFeature"/>,
/// fetches that slot's values via <paramref name="getter"/> and converts them into
/// <paramref name="doubleTemp"/>. The cursor is always advanced at least once, so callers
/// are expected to request features in strictly increasing order across calls.
/// </summary>
private void GetFeatureValues(ISlotCursor cursor, int iFeature, ValueGetter<VBuffer<float>> getter,
    ref VBuffer<float> temp, ref VBuffer<double> doubleTemp, ValueMapper<VBuffer<float>, VBuffer<double>> copier)
{
    while (cursor.MoveNext())
    {
        Contracts.Assert(iFeature >= checked((int)cursor.Position));
        if (iFeature == checked((int)cursor.Position))
            break;
    }
    Contracts.Assert(cursor.Position == iFeature);
    getter(ref temp);
    copier(ref temp, ref doubleTemp);
}
/// <summary>
/// Wraps <paramref name="getter"/> so that the slots covered by <paramref name="slotDropper"/>
/// are removed from fetched buffers; with no dropper the getter is returned unchanged.
/// </summary>
private static ValueGetter<VBuffer<T>> SubsetGetter<T>(ValueGetter<VBuffer<T>> getter, SlotDropper slotDropper)
{
    return slotDropper == null ? getter : slotDropper.SubsetGetter(getter);
}
/// <summary>
/// Returns a slot dropper object that has ranges of slots to be dropped,
/// based on an examination of the feature values.
/// </summary>
private static SlotDropper ConstructDropSlotRanges(ISlotCursor cursor,
    ValueGetter<VBuffer<float>> getter, ref VBuffer<float> temp)
{
    // The iteration here is slightly differently from a usual cursor iteration. Here, temp
    // already holds the value of the cursor's current position, and we don't really want
    // to re-fetch it, and the cursor is necessarily advanced.
    Contracts.Assert(cursor.State == CursorState.Good);
    // One bit per row; set when any remaining feature has a NaN in that row.
    BitArray rowHasMissing = new BitArray(temp.Length);
    for (; ; )
    {
        foreach (var kv in temp.Items())
        {
            if (Float.IsNaN(kv.Value))
                rowHasMissing.Set(kv.Key, true);
        }
        if (!cursor.MoveNext())
            break;
        getter(ref temp);
    }
    // Collapse consecutive runs of set bits into [min, max] ranges for the SlotDropper.
    List<int> minSlots = new List<int>();
    List<int> maxSlots = new List<int>();
    bool previousBit = false;
    for (int i = 0; i < rowHasMissing.Length; i++)
    {
        bool currentBit = rowHasMissing.Get(i);
        if (currentBit && !previousBit)
        {
            // Start of a new run.
            minSlots.Add(i);
            maxSlots.Add(i);
        }
        else if (currentBit)
            maxSlots[maxSlots.Count - 1] = i;
        previousBit = currentBit;
    }
    Contracts.Assert(maxSlots.Count == minSlots.Count);
    return new SlotDropper(temp.Length, minSlots.ToArray(), maxSlots.ToArray());
}
/// <summary>
/// Derives the query IDs and query start boundaries from the per-row group IDs:
/// each maximal run of equal group IDs becomes one query, and a final boundary equal
/// to the total row count is appended.
/// </summary>
private static void ConstructBoundariesAndQueryIds(ref VBuffer<ulong> groupIds, out int[] boundariesArray, out ulong[] qidsArray)
{
    var queryIds = new List<ulong>();
    var starts = new List<int>();
    ulong previous = 0;
    if (groupIds.Length > 0)
        groupIds.GetItemOrDefault(0, ref previous);
    int row = 0;
    foreach (ulong gid in groupIds.DenseValues())
    {
        // A new query begins at row 0 and whenever the group ID changes.
        if (row == 0 || previous != gid)
        {
            previous = gid;
            queryIds.Add(gid);
            starts.Add(row);
        }
        row++;
    }
    starts.Add(row);
    qidsArray = queryIds.ToArray();
    boundariesArray = starts.ToArray();
}
}
// REVIEW: Our data conversion is extremely inefficient. Fix it!
private sealed class MemImpl : DataConverter
{
// The role-mapped input data this converter reads from.
private readonly RoleMappedData _data;
// instanceList[feature] is the vector of values for the given feature
private readonly ValuesList[] _instanceList;
// Labels cast to short, one per kept row.
private readonly List<short> _targetsList;
// Labels as doubles, one per kept row.
private readonly List<double> _actualTargets;
// Per-row weights, or null when the data has no weight column.
private readonly List<double> _weights;
// Query start indices plus a final sentinel equal to the total row count.
private readonly List<int> _boundaries;
// Number of input rows skipped because of missing feature values.
private readonly long _numMissingInstances;
private readonly int _numExamples;
// When true, every feature becomes its own singleton flock (no flock aggregation).
private readonly bool _noFlocks;
private readonly int _minDocsPerLeaf;
public override int NumExamples
{
    get { return _numExamples; }
}
// Shared constructor core: allocates the per-feature value lists and label/weight/boundary
// buffers, then performs the single row-wise pass over the data. The unused "dummy"
// parameter only disambiguates this overload from the public constructors.
private MemImpl(RoleMappedData data, IHost host, double[][] binUpperBounds, Float maxLabel, bool dummy,
    bool noFlocks, PredictionKind kind, int[] categoricalFeatureIndices, bool categoricalSplit)
    : base(data, host, binUpperBounds, maxLabel, kind, categoricalFeatureIndices, categoricalSplit)
{
    _data = data;
    // Array of List<double> objects for each feature, containing values for that feature over all rows
    _instanceList = new ValuesList[NumFeatures];
    for (int i = 0; i < _instanceList.Length; i++)
        _instanceList[i] = new ValuesList();
    // Labels.
    _targetsList = new List<short>();
    _actualTargets = new List<double>();
    _weights = data.Schema.Weight != null ? new List<double>() : null;
    _boundaries = new List<int>();
    _noFlocks = noFlocks;
    MakeBoundariesAndCheckLabels(out _numMissingInstances, out long numInstances);
    // The downstream representation indexes rows with int, so cap the row count.
    if (numInstances > Utils.ArrayMaxSize)
        throw Host.ExceptParam(nameof(data), "Input data had {0} rows, but can only accomodate {1}", numInstances, Utils.ArrayMaxSize);
    _numExamples = (int)numInstances;
}
// Constructor used when bin upper bounds must be computed from the data (base receives
// null bounds; InitializeBins builds them, coordinated through parallelTraining).
public MemImpl(RoleMappedData data, IHost host, int maxBins, Float maxLabel, bool noFlocks, int minDocsPerLeaf,
    PredictionKind kind, IParallelTraining parallelTraining, int[] categoricalFeatureIndices, bool categoricalSplit)
    : this(data, host, null, maxLabel, dummy: true, noFlocks: noFlocks, kind: kind,
        categoricalFeatureIndices: categoricalFeatureIndices, categoricalSplit: categoricalSplit)
{
    // Convert features to binned values.
    _minDocsPerLeaf = minDocsPerLeaf;
    InitializeBins(maxBins, parallelTraining);
}
// Constructor used when the bin upper bounds are already known and supplied by the caller.
public MemImpl(RoleMappedData data, IHost host, double[][] binUpperBounds, Float maxLabel,
    bool noFlocks, PredictionKind kind, int[] categoricalFeatureIndices, bool categoricalSplit)
    : this(data, host, binUpperBounds, maxLabel, dummy: true, noFlocks: noFlocks, kind: kind,
        categoricalFeatureIndices: categoricalFeatureIndices, categoricalSplit: categoricalSplit)
{
    Host.AssertValue(binUpperBounds);
}
/// <summary>
/// Performs the single row-wise pass over the data: fills the per-feature value lists,
/// label and weight lists, and query boundaries; validates labels against <c>MaxLabel</c>
/// when applicable; and reports how many rows were skipped for missing features.
/// </summary>
/// <param name="missingInstances">Receives the number of rows skipped due to bad feature values.</param>
/// <param name="totalInstances">Receives the number of rows kept.</param>
private void MakeBoundariesAndCheckLabels(out long missingInstances, out long totalInstances)
{
    using (var ch = Host.Start("InitBoundariesAndLabels"))
    using (var pch = Host.StartProgressChannel("FastTree data preparation"))
    {
        long featureValues = 0;
        // Warn at about 2 GB usage.
        const long featureValuesWarnThreshold = (2L << 30) / sizeof(Double);
        bool featureValuesWarned = false;
        const string featureValuesWarning = "We seem to be processing a lot of data. Consider using the FastTree diskTranspose+ (or dt+) option, for slower but more memory efficient transposition.";
        const int queryChunkSize = 100;
        // Populate the feature values array and labels.
        ch.Info("Changing data from row-wise to column-wise");
        long pos = 0;
        double rowCountDbl = (double?)_data.Data.GetRowCount(lazy: true) ?? Double.NaN;
        pch.SetHeader(new ProgressHeader("examples"),
            e => e.SetProgress(0, pos, rowCountDbl));
        // REVIEW: Should we ignore rows with bad label, weight, or group? The previous code seemed to let
        // them through (but filtered out bad features).
        CursOpt curOptions = CursOpt.Label | CursOpt.Features | CursOpt.Weight;
        bool hasGroup = false;
        if (PredictionKind == PredictionKind.Ranking)
        {
            curOptions |= CursOpt.Group;
            hasGroup = _data.Schema.Group != null;
        }
        else
        {
            if (_data.Schema.Group != null)
                ch.Warning("This is not ranking problem, Group Id '{0}' column will be ignored", _data.Schema.Group.Name);
        }
        using (var cursor = new FloatLabelCursor(_data, curOptions))
        {
            ulong groupPrev = 0;
            while (cursor.MoveNext())
            {
                pos = cursor.KeptRowCount - 1;
                int index = checked((int)pos);
                ch.Assert(pos >= 0);
                // If we have no group, then the group number should not change.
                Host.Assert(hasGroup || cursor.Group == groupPrev);
                if (hasGroup)
                {
                    // If we are either at the start of iteration, or a new
                    // group has started, add the boundary and register the
                    // new group identifier.
                    if (pos == 0 || cursor.Group != groupPrev)
                    {
                        _boundaries.Add(index);
                        groupPrev = cursor.Group;
                    }
                }
                else if (pos % queryChunkSize == 0)
                {
                    // If there are no groups, it is best to just put the
                    // boundaries at regular intervals.
                    _boundaries.Add(index);
                }
                if (UsingMaxLabel)
                {
                    if (cursor.Label < 0 || cursor.Label > MaxLabel)
                        throw ch.Except("Found invalid label {0}. Value should be between 0 and {1}, inclusive.", cursor.Label, MaxLabel);
                }
                // Scatter the row's feature values into the per-feature column lists.
                foreach (var kvp in cursor.Features.Items())
                    _instanceList[kvp.Key].Add(index, kvp.Value);
                _actualTargets.Add(cursor.Label);
                if (_weights != null)
                    _weights.Add(cursor.Weight);
                _targetsList.Add((short)cursor.Label);
                featureValues += cursor.Features.Count;
                if (featureValues > featureValuesWarnThreshold && !featureValuesWarned)
                {
                    ch.Warning(featureValuesWarning);
                    featureValuesWarned = true;
                }
            }
            // Final sentinel boundary equal to the kept row count.
            _boundaries.Add(checked((int)cursor.KeptRowCount));
            totalInstances = cursor.KeptRowCount;
            missingInstances = cursor.BadFeaturesRowCount;
        }
        ch.Check(totalInstances > 0, "All instances skipped due to missing features.");
        if (missingInstances > 0)
            ch.Warning("Skipped {0} instances with missing features during training", missingInstances);
    }
}
/// <summary>
/// Computes the bin upper bounds for every feature this worker is responsible for
/// (per <see cref="IParallelTraining.GetLocalBinConstructionFeatures"/>), then
/// synchronizes the global boundaries across workers.
/// </summary>
/// <param name="maxBins">Maximum number of bins per feature.</param>
/// <param name="parallelTraining">Coordinates bin construction across workers; must not be null.</param>
private void InitializeBins(int maxBins, IParallelTraining parallelTraining)
{
    // Find upper bounds for each bin for each feature.
    using (var ch = Host.Start("InitBins"))
    using (var pch = Host.StartProgressChannel("FastTree in-memory bins initialization"))
    {
        BinFinder binFinder = new BinFinder();
        VBuffer<double> temp = default(VBuffer<double>);
        int len = _numExamples;
        // Scratch buffers reused across features by CalculateBins.
        double[] distinctValues = null;
        int[] distinctCounts = null;
        bool[] localConstructBinFeatures = parallelTraining.GetLocalBinConstructionFeatures(NumFeatures);
        int iFeature = 0;
        pch.SetHeader(new ProgressHeader("features"), e => e.SetProgress(0, iFeature, NumFeatures));
        for (iFeature = 0; iFeature < NumFeatures; iFeature++)
        {
            if (!localConstructBinFeatures[iFeature])
                continue;
            // The following strange call will actually sparsify.
            _instanceList[iFeature].CopyTo(len, ref temp);
            // REVIEW: In principle we could also put the min docs per leaf information
            // into here, and collapse bins somehow as we determine the bins, so that "trivial"
            // bins on the head or tail of the bin distribution are never actually considered.
            CalculateBins(binFinder, ref temp, maxBins, _minDocsPerLeaf,
                ref distinctValues, ref distinctCounts,
                out double[] binUpperBounds);
            BinUpperBounds[iFeature] = binUpperBounds;
        }
        parallelTraining.SyncGlobalBoundary(NumFeatures, maxBins, BinUpperBounds);
    }
}
/// <summary>
/// Materializes all feature flocks and assembles them with the dataset skeleton.
/// </summary>
public override Dataset GetDataset()
{
    using (var channel = Host.Start("BinFeatures"))
    {
        using (var progress = Host.StartProgressChannel("FastTree feature conversion"))
        {
            var flocks = CreateFlocks(channel, progress).ToArray();
            channel.Trace("{0} features stored in {1} flocks.", NumFeatures, flocks.Length);
            return new Dataset(CreateDatasetSkeleton(), flocks);
        }
    }
}
/// <summary>
/// Builds an n-hot flock over multiple features by merging each feature's binned
/// (row, bin) pairs in row order with a heap, producing a byte-delta-encoded sparse
/// representation in which a single row may carry several hot values.
/// </summary>
private NHotFeatureFlock CreateNHotFlock(IChannel ch, List<int> features)
{
    Contracts.AssertValue(ch);
    ch.Assert(Utils.Size(features) > 1);
    // Copy pasta from above.
    int[] hotFeatureStarts = new int[features.Count + 1];
    for (int i = 1; i < hotFeatureStarts.Length; ++i)
        hotFeatureStarts[i] = hotFeatureStarts[i - 1] + BinUpperBounds[features[i - 1]].Length - 1;
    IntArrayBits flockBits = IntArray.NumBitsNeeded(hotFeatureStarts[hotFeatureStarts.Length - 1] + 1);
    // One enumerator of (row, bin) pairs per feature; entries become null once exhausted.
    var kvEnums = new IEnumerator<KeyValuePair<int, int>>[features.Count];
    var delta = new List<byte>();
    var values = new List<int>();
    try
    {
        for (int i = 0; i < features.Count; ++i)
            kvEnums[i] = _instanceList[features[i]].Binned(BinUpperBounds[features[i]], NumExamples).GetEnumerator();
        Heap<int> heap = new Heap<int>(
            (i, j) =>
            {
                ch.AssertValue(kvEnums[i]);
                ch.AssertValue(kvEnums[j]);
                int irow = kvEnums[i].Current.Key;
                int jrow = kvEnums[j].Current.Key;
                if (irow == jrow) // If we're on the same row, prefer the "smaller" feature.
                    return j < i;
                // Earlier rows should go first.
                return jrow < irow;
            });
        // Do the initial population of the heap.
        for (int i = 0; i < kvEnums.Length; ++i)
        {
            if (kvEnums[i].MoveNext())
                heap.Add(i);
            else
            {
                kvEnums[i].Dispose();
                kvEnums[i] = null;
            }
        }
        // Iteratively build the delta-sparse and int arrays.
        // REVIEW: Could be hinted as having capacity count hot, but may do more harm than good.
        int last = 0;
        while (heap.Count > 0)
        {
            int i = heap.Pop();
            var kvEnum = kvEnums[i];
            ch.AssertValue(kvEnum);
            var kvp = kvEnum.Current;
            ch.Assert(kvp.Key >= last);
            ch.Assert(kvp.Value > 0);
            // Row gaps larger than one byte are bridged with (MaxValue, 0) filler entries.
            while (kvp.Key - last > Byte.MaxValue)
            {
                delta.Add(Byte.MaxValue);
                values.Add(0);
                last += Byte.MaxValue;
            }
            ch.Assert(kvp.Key - last <= Byte.MaxValue);
            // Note that kvp.Key - last might be zero, in the case where we are representing multiple
            // values for a single row.
            delta.Add((byte)(kvp.Key - last));
            values.Add(kvp.Value + hotFeatureStarts[i]);
            ch.Assert(kvp.Key > last || values.Count == 1 || values[values.Count - 1] > values[values.Count - 2]);
            last = kvp.Key;
            if (kvEnum.MoveNext())
                heap.Add(i);
            else
            {
                kvEnum.Dispose();
                kvEnums[i] = null;
            }
        }
    }
    finally
    {
        // Need to dispose the enumerators.
        foreach (var enumerator in kvEnums)
        {
            if (enumerator != null)
                enumerator.Dispose();
        }
    }
    // Correct the hot feature starts now that we're done binning.
    for (int f = 0; f < hotFeatureStarts.Length; ++f)
        hotFeatureStarts[f]++;
    var denseBins = (DenseIntArray)IntArray.New(values.Count, IntArrayType.Dense, flockBits, values);
    var bups = features.Select(fi => BinUpperBounds[fi]).ToArray(features.Count);
    return new NHotFeatureFlock(denseBins, delta.ToArray(), NumExamples, hotFeatureStarts, bups);
}
/// <summary>
/// Enumerates the feature flocks for the dataset, setting up <c>FeatureMap</c> (the map
/// from flock-feature position to original feature index, skipping trivial single-bin
/// features) and releasing each feature's value list as soon as it has been folded into
/// a flock. If every feature is trivial, a single dummy flock is produced so the
/// downstream learner always has at least one feature.
/// </summary>
private IEnumerable<FeatureFlockBase> CreateFlocks(IChannel ch, IProgressChannel pch)
{
    int iFeature = 0;
    FeatureMap = Enumerable.Range(0, NumFeatures).Where(f => BinUpperBounds[f].Length > 1).ToArray();
    foreach (FeatureFlockBase flock in CreateFlocksCore(ch, pch))
    {
        Contracts.Assert(flock.Count > 0);
        Contracts.Assert(iFeature + flock.Count <= FeatureMap.Length);
        // Free the value lists of every original feature covered by this flock.
        int min = FeatureMap[iFeature];
        int lim = iFeature + flock.Count == FeatureMap.Length
            ? NumFeatures
            : FeatureMap[iFeature + flock.Count];
        for (int i = min; i < lim; ++i)
            _instanceList[i] = null;
        iFeature += flock.Count;
        yield return flock;
    }
    ch.Assert(iFeature <= NumFeatures); // Some could have been filtered.
    ch.Assert(iFeature == FeatureMap.Length);
    if (iFeature == 0)
    {
        // It is possible to filter out all features. In such a case as this we introduce a dummy
        // "trivial" feature, so that the learning code downstream does not choke.
        yield return new SingletonFeatureFlock(new Dense0BitIntArray(NumExamples), BinUpperBounds[0]);
        FeatureMap = new[] { 0 };
    }
}
/// <summary>
/// Top-level flock creation: when <c>_noFlocks</c> is set, each non-trivial feature is
/// emitted as its own singleton flock; otherwise categorical ranges that verify as
/// one-hot are emitted as one-hot flocks and all remaining feature spans are delegated
/// to the range-based <c>CreateFlocksCore(ch, pch, start, lim)</c> overload.
/// </summary>
private IEnumerable<FeatureFlockBase> CreateFlocksCore(IChannel ch, IProgressChannel pch)
{
    int iFeature = 0;
    pch.SetHeader(new ProgressHeader("features"), e => e.SetProgress(0, iFeature, NumFeatures));
    VBuffer<double> temp = default(VBuffer<double>);
    // Working array for bins.
    int[] binnedValues = new int[NumExamples];
    if (_noFlocks)
    {
        for (iFeature = 0; iFeature < NumFeatures; ++iFeature)
        {
            var bup = BinUpperBounds[iFeature];
            ch.Assert(Utils.Size(bup) > 0);
            if (bup.Length == 1) // Trivial.
                continue;
            var values = _instanceList[iFeature];
            _instanceList[iFeature] = null;
            values.CopyTo(NumExamples, ref temp);
            yield return CreateSingletonFlock(ch, ref temp, binnedValues, bup);
        }
        yield break;
    }
    List<int> pending = new List<int>();
    int[] forwardIndexerWork = null;
    if (CategoricalSplit && CategoricalFeatureIndices != null)
    {
        // lastOn[row] records the last feature seen "hot" on that row; a second hot feature
        // within the same range on the same row disqualifies the range from one-hot treatment.
        int[] lastOn = new int[NumExamples];
        for (int i = 0; i < lastOn.Length; ++i)
            lastOn[i] = -1;
        int catRangeIndex = 0;
        for (iFeature = 0; iFeature < NumFeatures;)
        {
            if (catRangeIndex < CategoricalFeatureIndices.Length)
            {
                if (CategoricalFeatureIndices[catRangeIndex] == iFeature)
                {
                    bool isOneHot = true;
                    for (int iFeatureLocal = iFeature;
                        iFeatureLocal <= CategoricalFeatureIndices[catRangeIndex + 1];
                        ++iFeatureLocal)
                    {
                        Double[] bup = BinUpperBounds[iFeatureLocal];
                        if (bup.Length == 1)
                        {
                            // This is a trivial feature. Skip it.
                            continue;
                        }
                        Contracts.Assert(Utils.Size(bup) > 0);
                        Double firstBin = bup[0];
                        using (IEnumerator<int> hotEnumerator = _instanceList[iFeatureLocal].AllIndicesGT(NumExamples, firstBin).GetEnumerator())
                        {
                            while (hotEnumerator.MoveNext())
                            {
                                int last = lastOn[hotEnumerator.Current];
                                //Not a one-hot flock, bail.
                                if (last >= iFeature)
                                {
                                    isOneHot = false;
                                    pending.Clear();
                                    break;
                                }
                                lastOn[hotEnumerator.Current] = iFeatureLocal;
                            }
                        }
                        // BUGFIX (mirrors DiskImpl.Construct): once the range fails the one-hot
                        // check, stop scanning it. Previously the loop kept running and re-added
                        // features to 'pending', so a non-one-hot range was both emitted as a
                        // (false) one-hot flock AND re-emitted as singleton flocks below, since
                        // iFeature is only advanced when isOneHot holds.
                        if (!isOneHot)
                            break;
                        pending.Add(iFeatureLocal);
                    }
                    if (pending.Count > 0)
                    {
                        yield return CreateOneHotFlock(ch, pending, binnedValues, lastOn, _instanceList,
                            ref forwardIndexerWork, ref temp, true);
                        pending.Clear();
                    }
                    if (isOneHot)
                        iFeature = CategoricalFeatureIndices[catRangeIndex + 1] + 1;
                    catRangeIndex += 2;
                }
                else
                {
                    // Non-categorical gap before the next categorical range.
                    foreach (var flock in CreateFlocksCore(ch, pch, iFeature, CategoricalFeatureIndices[catRangeIndex]))
                        yield return flock;
                    iFeature = CategoricalFeatureIndices[catRangeIndex];
                }
            }
            else
            {
                // Tail of the feature space past the last categorical range.
                foreach (var flock in CreateFlocksCore(ch, pch, iFeature, NumFeatures))
                    yield return flock;
                iFeature = NumFeatures;
            }
        }
    }
    else
    {
        foreach (var flock in CreateFlocksCore(ch, pch, 0, NumFeatures))
            yield return flock;
    }
}
        /// <summary>
        /// Enumerates flocks over the feature range [<paramref name="startFeatureIndex"/>,
        /// <paramref name="featureLim"/>), greedily accumulating adjacent features into a one-hot
        /// flock while their hot rows do not overlap. On overlap it either degrades the running
        /// flock to an n-hot flock (if the total hot-row count stays below a density threshold)
        /// or cuts the flock at the overlap point and starts a new one.
        /// </summary>
        /// <param name="ch">Channel used for assertions</param>
        /// <param name="pch">Progress channel (not advanced directly here)</param>
        /// <param name="startFeatureIndex">Inclusive first feature index to consider</param>
        /// <param name="featureLim">Exclusive upper bound on feature indices</param>
        private IEnumerable<FeatureFlockBase> CreateFlocksCore(IChannel ch, IProgressChannel pch, int startFeatureIndex, int featureLim)
        {
            int iFeature = startFeatureIndex;
            VBuffer<double> temp = default(VBuffer<double>);
            // Working array for bins.
            int[] binnedValues = new int[NumExamples];
            // Holds what feature for an example was last "on", that is, will have
            // to be explicitly represented. This was the last feature for which AllIndicesGE
            // returned an index.
            int[] lastOn = new int[NumExamples];
            for (int i = 0; i < lastOn.Length; ++i)
                lastOn[i] = -1;
            int[] forwardIndexerWork = null;
            // What creations are pending?
            List<int> pending = new List<int>();
            Func<FeatureFlockBase> createOneHotFlock =
                () => CreateOneHotFlock(ch, pending, binnedValues, lastOn, _instanceList,
                    ref forwardIndexerWork, ref temp, false);
            Func<FeatureFlockBase> createNHotFlock =
                () => CreateNHotFlock(ch, pending);
            // The exclusive upper bound of what features have already been incorporated
            // into a flock.
            int limMade = startFeatureIndex;
            int countBins = 1; // Count of bins we'll need to represent. Starts at 1, accumulates "hot" features.
            // Tracking for n-hot flocks.
            long countHotRows = 0; // The count of hot "rows"
            long hotNThreshold = (long)(0.1 * NumExamples);
            bool canBeOneHot = true;
            // Emits the pending features as either a one-hot or an n-hot flock, then resets
            // all accumulation state so the next flock starts fresh at iFeature.
            Func<FeatureFlockBase> createFlock =
                () =>
                {
                    ch.Assert(pending.Count > 0);
                    FeatureFlockBase flock;
                    if (canBeOneHot)
                        flock = createOneHotFlock();
                    else
                        flock = createNHotFlock();
                    canBeOneHot = true;
                    limMade = iFeature;
                    pending.Clear();
                    countHotRows = 0;
                    countBins = 1;
                    return flock;
                };
            for (; iFeature < featureLim; ++iFeature)
            {
                Double[] bup = BinUpperBounds[iFeature];
                Contracts.Assert(Utils.Size(bup) > 0);
                if (bup.Length == 1)
                {
                    // This is a trivial feature. Skip it.
                    continue;
                }
                ValuesList values = _instanceList[iFeature];
                if (countBins > Utils.ArrayMaxSize - (bup.Length - 1))
                {
                    // It can happen that a flock could be created with more than Utils.ArrayMaxSize
                    // bins, in the case where we bin over a training dataset with many features with
                    // many bins (for example, 1 million features with 10k bins each), and then in a subsequent
                    // validation dataset we have these features suddenly become one-hot. Practically
                    // this will never happen, of course, but it is still possible. If this ever happens,
                    // we create the flock before this becomes an issue.
                    ch.Assert(0 < countBins && countBins <= Utils.ArrayMaxSize);
                    ch.Assert(limMade < iFeature);
                    ch.Assert(pending.Count > 0);
                    yield return createFlock();
                }
                countBins += bup.Length - 1;
                Double firstBin = bup[0];
                int localHotRows = 0;
                // The number of bits we would use if we incorporated the current feature in to the
                // existing running flock.
                // NOTE(review): newBits is computed but never used below — looks like dead code.
                IntArrayBits newBits = IntArray.NumBitsNeeded(countBins);
                if (canBeOneHot)
                {
                    using (IEnumerator<int> hotEnumerator = values.AllIndicesGT(NumExamples, firstBin).GetEnumerator())
                    {
                        if (pending.Count > 0)
                        {
                            // There are prior features we haven't yet flocked. So we are still contemplating
                            // "flocking" this prior feature with this feature (and possibly features beyond).
                            // The enumeration will need to run the appropriate checks.
                            while (hotEnumerator.MoveNext())
                            {
                                int i = hotEnumerator.Current;
                                ++localHotRows;
                                var last = lastOn[i];
                                Contracts.Assert(last < iFeature);
                                if (last >= limMade)
                                {
                                    // We've encountered an overlapping feature. We now need to decide whether we want
                                    // to continue accumulating into a flock and so make this n-hot flock, or cut it off
                                    // now and create a one-hot flock.
                                    if (countHotRows < hotNThreshold)
                                    {
                                        // We may want to create an N-hot flock.
                                        int superLocalHot = values.CountIndicesGT(NumExamples, firstBin);
                                        if (countHotRows + superLocalHot < hotNThreshold)
                                        {
                                            // If this succeeds, we want to create an N-hot flock including this.
                                            canBeOneHot = false;
                                            localHotRows = superLocalHot;
                                            break; // Future iterations will create the n-hot.
                                        }
                                        // If the test above failed, then we want to create a one-hot of [limMade, iFeature),
                                        // and keep going on this guy.
                                    }
                                    // We've decided to create a one-hot flock. Before continuing to fill in lastOn, use
                                    // lastOn in its current state to create a flock from limMade inclusive, to f
                                    // exclusive, and make "f" the new limMade. Note that we continue to fill in lastOn
                                    // once we finish this.
                                    ch.Assert(limMade < iFeature);
                                    ch.Assert(canBeOneHot);
                                    yield return createFlock();
                                    lastOn[i] = iFeature;
                                    // Now that we've made the feature there's no need continually check against lastOn[i]'s
                                    // prior values. Fall through to the limMade == iFeature case.
                                    break;
                                }
                                lastOn[i] = iFeature;
                            }
                        }
                        if (canBeOneHot)
                        {
                            // In the event that hotEnumerator was exhausted in the above loop, the following is a no-op.
                            while (hotEnumerator.MoveNext())
                            {
                                // There is no prior feature to flock, so there's no need to track anything yet.
                                // Just populate lastOn appropriately.
                                ++localHotRows;
                                lastOn[hotEnumerator.Current] = iFeature;
                            }
                        }
                    }
                    ch.Assert(values.CountIndicesGT(NumExamples, firstBin) == localHotRows);
                    pending.Add(iFeature); // Have not yet flocked this feature.
                }
                else
                {
                    // No need to track in lastOn, since we're no longer contemplating this being one-hot.
                    ch.Assert(limMade < iFeature);
                    ch.Assert(countHotRows < hotNThreshold);
                    ch.Assert(!canBeOneHot);
                    localHotRows = values.CountIndicesGT(NumExamples, firstBin);
                    if (countHotRows + localHotRows >= hotNThreshold)
                    {
                        // Too dense if we add iFeature to the mix. Make an n-hot of [limMade, iFeature),
                        // then decrement iFeature so that we reconsider it in light of being a candidate
                        // for one-hot or singleton. Do not add to pending, as its status will be considered
                        // in the next pass.
                        yield return createFlock();
                        --iFeature;
                    }
                    else // Have not yet flocked as feature.
                        pending.Add(iFeature);
                }
                countHotRows += localHotRows;
            }
            Contracts.Assert(limMade < featureLim);
            // Flush whatever features remain unflocked at the end of the range.
            if (pending.Count > 0)
                yield return createFlock();
        }
/// <summary>
/// Create an artificial metadata object to pad the Dataset
/// </summary>
private Dataset.DatasetSkeleton CreateDatasetSkeleton()
{
ulong[] docIds = new ulong[_numExamples]; // All zeros is fine
ulong[] queryIds = new ulong[_boundaries.Count - 1]; // All zeros is fine
var ds = UsingMaxLabel
? new Dataset.DatasetSkeleton(_targetsList.ToArray(), _boundaries.ToArray(), queryIds, docIds, new double[0][])
: new Dataset.DatasetSkeleton(_targetsList.ToArray(), _boundaries.ToArray(), queryIds, docIds, new double[0][], _actualTargets.ToArray());
//AP TODO change it to have weights=null when dataset is unweighted in order to avoid potential long memory scan
if (_weights != null)
ds.SampleWeights = _weights.ToArray();
return ds;
}
}
// REVIEW: Change this, as well as the bin finding code and bin upper bounds, to be Float instead of Double.
/// <summary>
/// A mutable list of index,value that may be kept sparse or dense.
/// </summary>
private sealed class ValuesList
{
private bool _isSparse;
private List<Double> _dense;
private int _nonZeroElements; // when dense, is the number of non-zero elements (for determining when to sparsify)
private List<KeyValuePair<int, Double>> _sparse;
public ValuesList()
{
_dense = new List<Double>();
}
public void Add(int index, Double value)
{
if (!_isSparse)
{
// Check if adding this element will make the array sparse.
if (ShouldSparsify(_nonZeroElements + 1, index + 1))
Sparsify();
else
{
// Add zeros if needed.
while (_dense.Count < index)
_dense.Add(default(Double));
// Add the value.
_dense.Add(value);
if (value != 0)
_nonZeroElements++;
return;
}
}
// Note this also may happen because we just sparsified.
Contracts.Assert(_isSparse);
if (value != 0)
_sparse.Add(new KeyValuePair<int, Double>(index, value));
}
private bool ShouldSparsify(int nonZeroElements, int totalElements)
{
// TODO: We need a better solution here. Also, maybe should start sparse and become dense instead?
return (double)nonZeroElements / totalElements < 0.25 && totalElements > 10;
}
private void Sparsify()
{
_sparse = new List<KeyValuePair<int, Double>>(_nonZeroElements);
for (int i = 0; i < _dense.Count; i++)
{
if (_dense[i] != 0)
_sparse.Add(new KeyValuePair<int, Double>(i, _dense[i]));
}
_isSparse = true;
_dense = null;
}
/// <summary>
/// Returns the count of all positions greater than an indicated value.
/// </summary>
/// <param name="length">The limit of indices to check</param>
/// <param name="gtValue">The value against which the greater-than
/// comparison is made</param>
/// <returns>The count of all indices in the range of 0 to <paramref name="length"/>
/// exclusive whose values are greater than <paramref name="gtValue"/></returns>
public int CountIndicesGT(int length, Double gtValue)
{
Contracts.Assert(0 <= length);
if (_isSparse)
{
Contracts.Assert(_sparse.Count == 0 || _sparse[_sparse.Count - 1].Key < length);
return _sparse.Count(kvp => kvp.Value > gtValue) + (0 > gtValue ? length - _sparse.Count : 0);
}
else
{
Contracts.Assert(_dense.Count <= length);
return _dense.Count(v => v > gtValue) + (0 > gtValue ? length - _dense.Count : 0);
}
}
            /// <summary>
            /// Return all indices that are greater than an indicated value.
            /// Implicitly stored entries count as zero, so they are returned whenever
            /// <paramref name="gtValue"/> is negative.
            /// </summary>
            /// <param name="lim">The limit of indices to return</param>
            /// <param name="gtValue">The value against which the greater-than
            /// comparison is made</param>
            /// <returns>All indices in the range of 0 to <paramref name="lim"/> exclusive
            /// whose values are greater than <paramref name="gtValue"/>, in
            /// increasing order</returns>
            public IEnumerable<int> AllIndicesGT(int lim, Double gtValue)
            {
                Contracts.Assert(0 <= lim);
                if (_isSparse)
                {
                    Contracts.Assert(_sparse.Count == 0 || _sparse[_sparse.Count - 1].Key < lim);
                    if (0 > gtValue)
                    {
                        // All implicitly defined sparse values will have to be returned.
                        // Interleave the implicit indices (between explicit entries) with the
                        // explicit entries that pass the comparison, preserving increasing order.
                        int prev = -1;
                        foreach (var kvp in _sparse)
                        {
                            Contracts.Assert(prev < kvp.Key);
                            while (++prev < kvp.Key)
                                yield return prev;
                            if (kvp.Value > gtValue)
                                yield return kvp.Key;
                        }
                        // Return the "leftovers."
                        while (++prev < lim)
                            yield return prev;
                    }
                    else
                    {
                        // Only explicitly defined values have to be returned.
                        foreach (var kvp in _sparse)
                        {
                            if (kvp.Value > gtValue)
                                yield return kvp.Key;
                        }
                    }
                }
                else
                {
                    Contracts.Assert(_dense.Count <= lim);
                    for (int i = 0; i < _dense.Count; ++i)
                    {
                        if (_dense[i] > gtValue)
                            yield return i;
                    }
                    if (0 > gtValue)
                    {
                        // All implicitly defined post-dense values will have to be returned,
                        // assuming there are any (this set is only non-empty when listLim < lim).
                        for (int i = _dense.Count; i < lim; ++i)
                            yield return i;
                    }
                }
            }
            /// <summary>
            /// Copies the first <paramref name="length"/> values of this list into a
            /// <see cref="VBuffer{T}"/>, reusing the destination's existing buffers where
            /// possible. As a side effect this may switch the list to the sparse
            /// representation, if the requested length makes the dense form sparse enough.
            /// </summary>
            public void CopyTo(int length, ref VBuffer<Double> dst)
            {
                Contracts.Assert(0 <= length);
                int[] indices = dst.Indices;
                Double[] values = dst.Values;
                if (!_isSparse)
                {
                    Contracts.Assert(_dense.Count <= length);
                    if (ShouldSparsify(_nonZeroElements, length))
                        Sparsify();
                    else
                    {
                        // Dense copy: fill the trailing implicit zeros explicitly.
                        Utils.EnsureSize(ref values, length, keepOld: false);
                        if (_dense.Count < length)
                        {
                            _dense.CopyTo(values, 0);
                            Array.Clear(values, _dense.Count, length - _dense.Count);
                        }
                        else
                            _dense.CopyTo(0, values, 0, length);
                        dst = new VBuffer<Double>(length, values, indices);
                        return;
                    }
                }
                // Sparse copy (also reached when the block above just sparsified).
                int count = _sparse.Count;
                Contracts.Assert(count <= length);
                Utils.EnsureSize(ref indices, count);
                Utils.EnsureSize(ref values, count);
                for (int i = 0; i < _sparse.Count; ++i)
                {
                    indices[i] = _sparse[i].Key;
                    values[i] = _sparse[i].Value;
                }
                Contracts.Assert(Utils.IsIncreasing(0, indices, count, length));
                dst = new VBuffer<Double>(length, count, values, indices);
            }
            /// <summary>
            /// An enumerable of the row/bin pair of every non-zero bin row according to the
            /// binning passed into this method. As a side effect this may switch the list to
            /// the sparse representation.
            /// </summary>
            /// <param name="binUpperBounds">The binning to use for the enumeration</param>
            /// <param name="length">The number of rows in this feature</param>
            /// <returns>An enumerable that returns a pair of every row-index and binned value,
            /// where the row indices are increasing, the binned values are positive</returns>
            public IEnumerable<KeyValuePair<int, int>> Binned(double[] binUpperBounds, int length)
            {
                Contracts.Assert(Utils.Size(binUpperBounds) > 0);
                Contracts.Assert(0 <= length);
                // zeroBin is the bin an implicit zero value falls into; when it is non-zero,
                // even unstored rows must be yielded.
                int zeroBin = Algorithms.FindFirstGE(binUpperBounds, 0);
                IntArrayBits numBitsNeeded = IntArray.NumBitsNeeded(binUpperBounds.Length);
                if (numBitsNeeded == IntArrayBits.Bits0)
                    yield break;
                if (!_isSparse)
                {
                    Contracts.Assert(_dense.Count <= length);
                    if (ShouldSparsify(_nonZeroElements, length))
                        Sparsify();
                }
                if (_isSparse)
                {
                    Contracts.AssertValue(_sparse);
                    if (zeroBin == 0)
                    {
                        // We can skip all implicit values in sparse.
                        foreach (var kvp in _sparse)
                        {
                            Contracts.Assert(kvp.Key < length);
                            int binned = Algorithms.FindFirstGE(binUpperBounds, kvp.Value);
                            if (binned > 0)
                                yield return new KeyValuePair<int, int>(kvp.Key, binned);
                        }
                        yield break;
                    }
                    Contracts.Assert(zeroBin != 0);
                    // Implicit zeros bin to a non-zero bin: interleave them between the
                    // explicit entries so row indices stay increasing.
                    int last = -1;
                    foreach (var kvp in _sparse)
                    {
                        Contracts.Assert(kvp.Key < length);
                        while (++last < kvp.Key)
                            yield return new KeyValuePair<int, int>(last, zeroBin);
                        int binned = Algorithms.FindFirstGE(binUpperBounds, kvp.Value);
                        if (binned > 0)
                            yield return new KeyValuePair<int, int>(kvp.Key, binned);
                    }
                    while (++last < length)
                        yield return new KeyValuePair<int, int>(last, zeroBin);
                    yield break;
                }
                Contracts.Assert(!_isSparse);
                Contracts.AssertValue(_dense);
                Contracts.Assert(_dense.Count <= length);
                for (int i = 0; i < _dense.Count; ++i)
                {
                    int binned = Algorithms.FindFirstGE(binUpperBounds, _dense[i]);
                    if (binned > 0)
                        yield return new KeyValuePair<int, int>(i, binned);
                }
                if (zeroBin > 0)
                {
                    // Rows past the dense list are implicit zeros.
                    for (int i = _dense.Count; i < length; ++i)
                        yield return new KeyValuePair<int, int>(i, zeroBin);
                }
            }
            /// <summary>
            /// Provides efficient forward-only random access over a subset of
            /// <see cref="ValuesList"/> instances: rows must be accessed in increasing order
            /// per feature, which lets sparse lookups resume from the previous position
            /// instead of searching from the start.
            /// </summary>
            public sealed class ForwardIndexer
            {
                // All of the _values list. We are only addressing _min through _lim.
                private readonly ValuesList[] _values;
                // Parallel to the subsequence of _values in min to lim, indicates the index where
                // we should start to look for the next value, if the corresponding value list in
                // _values is sparse. If the corresponding value list is dense the entry at this
                // position is not used.
                private readonly int[] _perFeaturePosition;
                private readonly int[] _featureIndices;
#if DEBUG
                // Holds for each feature the row index that it was previously accessed on.
                // Purely for validation purposes.
                private int[] _lastRow;
#endif
                /// <summary>
                /// Access the value of a particular feature, at a particular row.
                /// </summary>
                /// <param name="featureIndex">A feature index, which indexes not the global feature indices,
                /// but the index into the subset of features specified at the constructor time</param>
                /// <param name="rowIndex">The row index to access, which must be non-decreasing, and must
                /// indeed be actually increasing for access on the same feature (for example, if you have two features,
                /// it is OK to access <c>[1, 5]</c>, then <c>[0, 5]</c>, but once this is done you cannot
                /// access the same feature at the same position.</param>
                /// <returns></returns>
                public Double this[int featureIndex, int rowIndex]
                {
                    get
                    {
                        Contracts.Assert(0 <= featureIndex && featureIndex < _featureIndices.Length);
                        Contracts.Assert(rowIndex >= 0);
                        var values = _values[_featureIndices[featureIndex]];
#if DEBUG
                        int lastRow = _lastRow[featureIndex];
                        Contracts.Assert(rowIndex > lastRow);
                        _lastRow[featureIndex] = rowIndex;
#endif
                        if (!values._isSparse)
                            return rowIndex < values._dense.Count ? values._dense[rowIndex] : 0;
                        // Sparse: resume the scan where the previous access left off.
                        int last = _perFeaturePosition[featureIndex];
                        var sp = values._sparse;
#if DEBUG
                        // The next value of _sparse (assuming there is one) should have been past the last access.
                        // That is, sp[last].Key, if it exist, must be greater than lastRow.
                        // NOTE(review): if last has advanced to sp.Count (all entries consumed),
                        // sp[last] here would throw in DEBUG builds — confirm this is unreachable.
                        Contracts.Assert(sp.Count == 0 || sp[last].Key > lastRow);
#endif
                        while (last < sp.Count)
                        {
                            var s = sp[last++];
                            if (s.Key < rowIndex)
                                continue;
                            if (s.Key > rowIndex)
                            {
                                // We'd previously put last past this element,
                                // have to put it back a bit.
                                last--;
                                break;
                            }
                            Contracts.Assert(s.Key == rowIndex);
                            _perFeaturePosition[featureIndex] = last;
                            return s.Value;
                        }
                        // Not explicitly stored: the value is an implicit zero.
                        _perFeaturePosition[featureIndex] = last;
                        return 0;
                    }
                }
                /// <summary>
                /// Initialize a forward indexer.
                /// </summary>
                /// <param name="values">Holds the values of the features</param>
                /// <param name="features">The array of feature indices this will index</param>
                /// <param name="workArray">A possibly shared working array, once used by this forward
                /// indexer it should not be used in any previously created forward indexer</param>
                public ForwardIndexer(ValuesList[] values, int[] features, ref int[] workArray)
                {
                    Contracts.AssertValue(values);
                    Contracts.AssertValueOrNull(workArray);
                    Contracts.AssertValue(features);
                    Contracts.Assert(Utils.IsIncreasing(0, features, values.Length));
                    Contracts.Assert(features.All(i => values[i] != null));
                    _values = values;
                    _featureIndices = features;
                    Utils.EnsureSize(ref workArray, _featureIndices.Length, keepOld: false);
                    Contracts.AssertValue(workArray); // Should be initialized now.
                    _perFeaturePosition = workArray;
                    Array.Clear(_perFeaturePosition, 0, _featureIndices.Length);
#if DEBUG
                    _lastRow = new int[features.Length];
                    for (int i = 0; i < _lastRow.Length; ++i)
                        _lastRow[i] = -1;
#endif
                }
            }
}
}
    /// <summary>
    /// Converts example data into the binned <see cref="Dataset"/> representation used by
    /// FastTree training, remembering the bin boundaries so that subsequent (e.g. validation)
    /// datasets can be binned compatibly via <see cref="GetCompatibleDataset"/>.
    /// </summary>
    internal sealed class ExamplesToFastTreeBins
    {
        private readonly int _maxBins;
        private readonly Float _maxLabel;
        private readonly IHost _host;
        private readonly bool _diskTranspose;
        private readonly bool _noFlocks;
        private readonly int _minDocsPerLeaf;
        /// <summary> Bin boundaries </summary>
        public double[][] BinUpperBounds
        {
            get;
            private set;
        }
        // Maps the features actually used by the dataset back to input feature indices.
        public int[] FeatureMap { get; private set; }
        public ExamplesToFastTreeBins(IHostEnvironment env, int maxBins, bool diskTranspose, bool noFlocks, int minDocsPerLeaf, Float maxLabel)
        {
            Contracts.AssertValue(env);
            _host = env.Register("Converter");
            _maxBins = maxBins;
            _maxLabel = maxLabel;
            _diskTranspose = diskTranspose;
            _noFlocks = noFlocks;
            _minDocsPerLeaf = minDocsPerLeaf;
        }
        /// <summary>
        /// Finds bin boundaries from the given data and returns the binned dataset.
        /// Populates <see cref="BinUpperBounds"/> and <see cref="FeatureMap"/> as a side effect.
        /// </summary>
        public Dataset FindBinsAndReturnDataset(RoleMappedData data, PredictionKind kind, IParallelTraining parallelTraining,
            int[] categoricalFeaturIndices, bool categoricalSplit)
        {
            using (var ch = _host.Start("InitDataset"))
            {
                ch.Info("Making per-feature arrays");
                var convData = DataConverter.Create(data, _host, _maxBins, _maxLabel, _diskTranspose, _noFlocks,
                    _minDocsPerLeaf, kind, parallelTraining, categoricalFeaturIndices, categoricalSplit);
                ch.Info("Processed {0} instances", convData.NumExamples);
                ch.Info("Binning and forming Feature objects");
                Dataset d = convData.GetDataset();
                BinUpperBounds = convData.BinUpperBounds;
                FeatureMap = convData.FeatureMap;
                return d;
            }
        }
        /// <summary>
        /// Bins a new dataset using the boundaries previously computed by
        /// <see cref="FindBinsAndReturnDataset"/>, so the result is bin-compatible with it.
        /// </summary>
        public Dataset GetCompatibleDataset(RoleMappedData data, PredictionKind kind, int[] categoricalFeatures, bool categoricalSplit)
        {
            _host.AssertValue(BinUpperBounds);
            var convData = DataConverter.Create(data, _host, BinUpperBounds, _maxLabel, _diskTranspose, _noFlocks, kind,
                categoricalFeatures, categoricalSplit);
            return convData.GetDataset();
        }
    }
public abstract class FastTreePredictionWrapper :
PredictorBase<Float>,
IValueMapper,
ICanSaveInTextFormat,
ICanSaveInIniFormat,
ICanSaveInSourceCode,
ICanSaveModel,
ICanSaveSummary,
ICanGetSummaryInKeyValuePairs,
ITreeEnsemble,
IPredictorWithFeatureWeights<Float>,
IWhatTheFeatureValueMapper,
ICanGetSummaryAsIRow,
ISingleCanSavePfa,
ISingleCanSaveOnnx
{
//The below two properties are necessary for tree Visualizer
public Ensemble TrainedEnsemble { get; }
public int NumTrees => TrainedEnsemble.NumTrees;
// Inner args is used only for documentation purposes when saving comments to INI files.
protected readonly string InnerArgs;
// The total number of features used in training (takes the value of zero if the
// written version of the loaded model is less than VerNumFeaturesSerialized)
protected readonly int NumFeatures;
// Maximum index of the split features of trainedEnsemble trees
protected readonly int MaxSplitFeatIdx;
protected abstract uint VerNumFeaturesSerialized { get; }
protected abstract uint VerDefaultValueSerialized { get; }
protected abstract uint VerCategoricalSplitSerialized { get; }
public ColumnType InputType { get; }
public ColumnType OutputType => NumberType.Float;
public bool CanSavePfa => true;
public bool CanSaveOnnx => true;
        /// <summary>
        /// Constructs the wrapper around a freshly trained ensemble.
        /// </summary>
        /// <param name="env">The host environment</param>
        /// <param name="name">Registration name for the host</param>
        /// <param name="trainedEnsemble">The trained tree ensemble; must not be null</param>
        /// <param name="numFeatures">The number of features used in training; must be positive
        /// and exceed the maximum split feature index of the ensemble</param>
        /// <param name="innerArgs">Training arguments recorded for INI-file comments; may be null</param>
        protected FastTreePredictionWrapper(IHostEnvironment env, string name, Ensemble trainedEnsemble, int numFeatures, string innerArgs)
            : base(env, name)
        {
            Host.CheckValue(trainedEnsemble, nameof(trainedEnsemble));
            Host.CheckParam(numFeatures > 0, nameof(numFeatures), "must be positive");
            Host.CheckValueOrNull(innerArgs);
            // REVIEW: When we make the predictor wrapper, we may want to further "optimize"
            // the trained ensemble to, for instance, resize arrays so that they are of the length
            // the actual number of leaves/nodes, or remove unnecessary arrays, and so forth.
            TrainedEnsemble = trainedEnsemble;
            InnerArgs = innerArgs;
            NumFeatures = numFeatures;
            MaxSplitFeatIdx = FindMaxFeatureIndex(trainedEnsemble);
            Contracts.Assert(NumFeatures > MaxSplitFeatIdx);
            InputType = new VectorType(NumberType.Float, NumFeatures);
        }
        /// <summary>
        /// Deserialization constructor. Older model versions may lack default-value handling,
        /// categorical splits, or the feature count; the version checks below keep those
        /// models loadable.
        /// </summary>
        protected FastTreePredictionWrapper(IHostEnvironment env, string name, ModelLoadContext ctx, VersionInfo ver)
            : base(env, name, ctx)
        {
            // *** Binary format ***
            // Ensemble
            // int: Inner args string id
            // int: Number of features (VerNumFeaturesSerialized)
            // <PredictionKind> specific stuff
            ctx.CheckVersionInfo(ver);
            bool usingDefaultValues = false;
            bool categoricalSplits = false;
            if (ctx.Header.ModelVerWritten >= VerDefaultValueSerialized)
                usingDefaultValues = true;
            if (ctx.Header.ModelVerWritten >= VerCategoricalSplitSerialized)
                categoricalSplits = true;
            TrainedEnsemble = new Ensemble(ctx, usingDefaultValues, categoricalSplits);
            MaxSplitFeatIdx = FindMaxFeatureIndex(TrainedEnsemble);
            InnerArgs = ctx.LoadStringOrNull();
            if (ctx.Header.ModelVerWritten >= VerNumFeaturesSerialized)
            {
                NumFeatures = ctx.Reader.ReadInt32();
                // It is possible that the number of features is 0 when an old model is loaded and then saved with the new version.
                Host.CheckDecode(NumFeatures >= 0);
            }
            // In the days of TLC <= 2.7 before we had a data pipeline, there was
            // some auxiliary structure called the "ContentMap." This structure is
            // no longer necessary or helpful since the data pipeline is in
            // TLC >= 3.0 supposed to be independent of any predictor specific
            // tricks.
            InputType = new VectorType(NumberType.Float, NumFeatures);
        }
        /// <summary>
        /// Serializes the wrapper; the layout must mirror the deserialization constructor above.
        /// </summary>
        protected override void SaveCore(ModelSaveContext ctx)
        {
            base.SaveCore(ctx);
            // *** Binary format ***
            // Ensemble
            // int: Inner args string id
            // int: Number of features (VerNumFeaturesSerialized)
            // <PredictionKind> specific stuff
            TrainedEnsemble.Save(ctx);
            ctx.SaveStringOrNull(InnerArgs);
            Host.Assert(NumFeatures >= 0);
            ctx.Writer.Write(NumFeatures);
        }
        /// <summary>
        /// Returns the scoring delegate. Only <see cref="VBuffer{Float}"/> input and
        /// <see cref="Float"/> output are supported; the double cast reshapes the
        /// concrete delegate into the requested generic signature.
        /// </summary>
        public ValueMapper<TIn, TOut> GetMapper<TIn, TOut>()
        {
            Host.Check(typeof(TIn) == typeof(VBuffer<Float>));
            Host.Check(typeof(TOut) == typeof(Float));
            ValueMapper<VBuffer<Float>, Float> del = Map;
            return (ValueMapper<TIn, TOut>)(Delegate)del;
        }
        /// <summary>
        /// Scores a single example by evaluating the ensemble. When the input type has a
        /// fixed size the length must match exactly; otherwise it need only cover the
        /// largest split feature index.
        /// </summary>
        protected virtual void Map(ref VBuffer<Float> src, ref Float dst)
        {
            if (InputType.VectorSize > 0)
                Host.Check(src.Length == InputType.VectorSize);
            else
                Host.Check(src.Length > MaxSplitFeatIdx);
            dst = (Float)TrainedEnsemble.GetOutput(ref src);
        }
        /// <summary>
        /// Returns a delegate computing per-feature contributions for an example, keeping the
        /// <paramref name="top"/> largest positive and <paramref name="bottom"/> largest
        /// negative contributions, optionally normalized.
        /// </summary>
        public ValueMapper<TSrc, VBuffer<Float>> GetWhatTheFeatureMapper<TSrc, TDst>(int top, int bottom, bool normalize)
        {
            Host.Check(typeof(TSrc) == typeof(VBuffer<Float>));
            Host.Check(typeof(TDst) == typeof(VBuffer<Float>));
            Host.Check(top >= 0, "top must be non-negative");
            Host.Check(bottom >= 0, "bottom must be non-negative");
            // The builder is captured by the closure and reused across calls to avoid reallocation.
            BufferBuilder<Float> builder = null;
            ValueMapper<VBuffer<Float>, VBuffer<Float>> del =
                (ref VBuffer<Float> src, ref VBuffer<Float> dst) =>
                {
                    WhatTheFeatureMap(ref src, ref dst, ref builder);
                    Numeric.VectorUtils.SparsifyNormalize(ref dst, top, bottom, normalize);
                };
            return (ValueMapper<TSrc, VBuffer<Float>>)(Delegate)del;
        }
        /// <summary>
        /// Validates the input length and delegates the feature-contribution computation
        /// to the ensemble.
        /// </summary>
        private void WhatTheFeatureMap(ref VBuffer<Float> src, ref VBuffer<Float> dst, ref BufferBuilder<Float> builder)
        {
            if (InputType.VectorSize > 0)
                Host.Check(src.Length == InputType.VectorSize);
            else
                Host.Check(src.Length > MaxSplitFeatIdx);
            TrainedEnsemble.GetFeatureContributions(ref src, ref dst, ref builder);
        }
        /// <summary>
        /// Writes out a C# expression representation of the ensemble.
        /// </summary>
        /// <param name="writer">The destination writer</param>
        /// <param name="schema">Optional schema used to resolve feature names; may be null</param>
        public void SaveAsCode(TextWriter writer, RoleMappedSchema schema)
        {
            Host.CheckValueOrNull(schema);
            SaveEnsembleAsCode(writer, schema);
        }
        /// <summary>
        /// Output the model in text form; for tree ensembles this is the INI format.
        /// </summary>
        public void SaveAsText(TextWriter writer, RoleMappedSchema schema)
        {
            Host.CheckValue(writer, nameof(writer));
            Host.CheckValueOrNull(schema);
            SaveAsIni(writer, schema);
        }
        /// <summary>
        /// Output the INI model to a given writer, appending an evaluator section for the
        /// calibrator when one is provided.
        /// </summary>
        public void SaveAsIni(TextWriter writer, RoleMappedSchema schema, ICalibrator calibrator = null)
        {
            Host.CheckValue(writer, nameof(writer));
            Host.CheckValue(schema, nameof(schema));
            Host.CheckValueOrNull(calibrator);
            string ensembleIni = TrainedEnsemble.ToTreeEnsembleIni(new FeaturesToContentMap(schema),
                InnerArgs, appendFeatureGain: true, includeZeroGainFeatures: false);
            ensembleIni = AddCalibrationToIni(ensembleIni, calibrator);
            writer.WriteLine(ensembleIni);
        }
/// <summary>
/// Get the calibration summary in INI format
/// </summary>
private string AddCalibrationToIni(string ini, ICalibrator calibrator)
{
Host.AssertValue(ini);
Host.AssertValueOrNull(calibrator);
if (calibrator == null)
return ini;
if (calibrator is PlattCalibrator)
{
string calibratorEvaluatorIni = IniFileUtils.GetCalibratorEvaluatorIni(ini, calibrator as PlattCalibrator);
return IniFileUtils.AddEvaluator(ini, calibratorEvaluatorIni);
}
else
{
StringBuilder newSection = new StringBuilder();
newSection.AppendLine();
newSection.AppendLine();
newSection.AppendLine("[TLCCalibration]");
newSection.AppendLine("Type=" + calibrator.GetType().Name);
return ini + newSection;
}
}
        /// <summary>
        /// Emits the ensemble as a PFA expression over the given input token.
        /// </summary>
        public JToken SaveAsPfa(BoundPfaContext ctx, JToken input)
        {
            Host.CheckValue(ctx, nameof(ctx));
            Host.CheckValue(input, nameof(input));
            return TrainedEnsemble.AsPfa(ctx, input);
        }
private enum NodeMode
{
[Description("BRANCH_LEQ")]
BranchLEq,
[Description("BRANCH_LT")]
BranchLT,
[Description("BRANCH_GTE")]
BranchGte,
[Description("BRANCH_GT")]
BranchGT,
[Description("BRANCH_EQ")]
BranchEq,
[Description("BRANCH_LT")]
BranchNeq,
[Description("LEAF")]
Leaf
};
        // Output transforms for the ONNX TreeEnsembleRegressor "post_transform" attribute;
        // the description strings are the ONNX-ML spec values.
        private enum PostTransform
        {
            [Description("NONE")]
            None,
            [Description("SOFTMAX")]
            SoftMax,
            // NOTE(review): member name is a typo for "Logistic"; the description is correct.
            // Renaming is deferred in case other parts of the file reference it.
            [Description("LOGISTIC")]
            Logstic,
            [Description("SOFTMAX_ZERO")]
            SoftMaxZero
        }
        // Tree-aggregation modes for the ONNX TreeEnsembleRegressor "aggregate_function"
        // attribute; the description strings are the ONNX-ML spec values.
        private enum AggregateFunction
        {
            [Description("AVERAGE")]
            Average,
            [Description("SUM")]
            Sum,
            [Description("MIN")]
            Min,
            [Description("MAX")]
            Max
        }
        /// <summary>
        /// Serializes the ensemble as a single ONNX "TreeEnsembleRegressor" node. Each tree's
        /// interior nodes come first (ids 0..NumNodes-1) followed by its leaves
        /// (ids NumNodes..NumNodes+NumLeaves-1); negative child indices in the source tree
        /// (bitwise-complemented leaf references) are remapped into that flat id space.
        /// </summary>
        public virtual bool SaveAsOnnx(OnnxContext ctx, string[] outputNames, string featureColumn)
        {
            Host.CheckValue(ctx, nameof(ctx));
            //Nodes.
            var nodesTreeids = new List<long>();
            var nodesIds = new List<long>();
            var nodesFeatureIds = new List<long>();
            var nodeModes = new List<string>();
            var nodesValues = new List<double>();
            var nodeHitrates = new List<long>();
            var missingValueTracksTrue = new List<bool>();
            var nodesTrueNodeIds = new List<long>();
            var nodesFalseNodeIds = new List<long>();
            // NOTE(review): nodesBaseValues is filled only for leaves and never attached as an
            // attribute below ("base_values" is written as a fresh {0} list) — apparently dead.
            var nodesBaseValues = new List<float>();
            //Leafs.
            var classTreeIds = new List<long>();
            var classNodeIds = new List<long>();
            var classIds = new List<long>();
            var classWeights = new List<double>();
            int treeIndex = -1;
            foreach (var tree in TrainedEnsemble.Trees)
            {
                treeIndex++;
                for (int nodeIndex = 0; nodeIndex < tree.NumNodes; nodeIndex++)
                {
                    nodesTreeids.Add(treeIndex);
                    nodeModes.Add(NodeMode.BranchLEq.GetDescription());
                    nodesIds.Add(nodeIndex);
                    nodesFeatureIds.Add(tree.SplitFeature(nodeIndex));
                    nodesValues.Add(tree.RawThresholds[nodeIndex]);
                    // Negative child index ~i denotes leaf i; map it past the interior nodes.
                    nodesTrueNodeIds.Add(tree.LteChild[nodeIndex] < 0 ? ~tree.LteChild[nodeIndex] + tree.NumNodes : tree.LteChild[nodeIndex]);
                    nodesFalseNodeIds.Add(tree.GtChild[nodeIndex] < 0 ? ~tree.GtChild[nodeIndex] + tree.NumNodes : tree.GtChild[nodeIndex]);
                    // A missing value follows the "true" (<=) branch when its substitute value
                    // passes the threshold; the null-conditional comparison is false when there
                    // is no default-for-missing array.
                    if (tree.DefaultValueForMissing?[nodeIndex] <= tree.RawThresholds[nodeIndex])
                        missingValueTracksTrue.Add(true);
                    else
                        missingValueTracksTrue.Add(false);
                    nodeHitrates.Add(0);
                }
                for (int leafIndex = 0; leafIndex < tree.NumLeaves; leafIndex++)
                {
                    int nodeIndex = tree.NumNodes + leafIndex;
                    nodesTreeids.Add(treeIndex);
                    nodesBaseValues.Add(0);
                    nodeModes.Add(NodeMode.Leaf.GetDescription());
                    nodesIds.Add(nodeIndex);
                    // Leaves carry no split; feature/threshold/children are dummies.
                    nodesFeatureIds.Add(0);
                    nodesValues.Add(0);
                    nodesTrueNodeIds.Add(0);
                    nodesFalseNodeIds.Add(0);
                    missingValueTracksTrue.Add(false);
                    nodeHitrates.Add(0);
                    // The leaf's output value goes into the target (class) arrays.
                    classTreeIds.Add(treeIndex);
                    classNodeIds.Add(nodeIndex);
                    classIds.Add(0);
                    classWeights.Add(tree.LeafValues[leafIndex]);
                }
            }
            string opType = "TreeEnsembleRegressor";
            var node = ctx.CreateNode(opType, new[] { featureColumn }, outputNames, ctx.GetNodeName(opType));
            node.AddAttribute("post_transform", PostTransform.None.GetDescription());
            node.AddAttribute("n_targets", 1);
            node.AddAttribute("base_values", new List<float>() { 0 });
            node.AddAttribute("aggregate_function", AggregateFunction.Sum.GetDescription());
            node.AddAttribute("nodes_treeids", nodesTreeids);
            node.AddAttribute("nodes_nodeids", nodesIds);
            node.AddAttribute("nodes_featureids", nodesFeatureIds);
            node.AddAttribute("nodes_modes", nodeModes);
            node.AddAttribute("nodes_values", nodesValues);
            node.AddAttribute("nodes_truenodeids", nodesTrueNodeIds);
            node.AddAttribute("nodes_falsenodeids", nodesFalseNodeIds);
            node.AddAttribute("nodes_missing_value_tracks_true", missingValueTracksTrue);
            node.AddAttribute("target_treeids", classTreeIds);
            node.AddAttribute("target_nodeids", classNodeIds);
            node.AddAttribute("target_ids", classIds);
            node.AddAttribute("target_weights", classWeights);
            return true;
        }
        /// <summary>
        /// Writes the normalized per-feature gain summary of the ensemble, one
        /// tab-separated name/gain pair per line.
        /// </summary>
        public void SaveSummary(TextWriter writer, RoleMappedSchema schema)
        {
            writer.WriteLine();
            writer.WriteLine("Per-feature gain summary for the boosted tree ensemble:");
            foreach (var pair in GetSummaryInKeyValuePairs(schema))
            {
                Host.Assert(pair.Value is Double);
                writer.WriteLine("\t{0}\t{1}", pair.Key, (Double)pair.Value);
            }
        }
        /// <summary>
        /// Yields (feature name, gain) pairs in descending gain order, with gains rescaled as
        /// sqrt(gain)/sqrt(max gain) so the top feature reports 1. Features without a slot
        /// name fall back to the synthetic name "f{index}".
        /// </summary>
        private IEnumerable<KeyValuePair<string, Double>> GetSortedFeatureGains(RoleMappedSchema schema)
        {
            var gainMap = new FeatureToGainMap(TrainedEnsemble.Trees.ToList(), normalize: true);
            var names = default(VBuffer<ReadOnlyMemory<char>>);
            MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, NumFeatures, ref names);
            var ordered = gainMap.OrderByDescending(pair => pair.Value);
            Double max = ordered.FirstOrDefault().Value;
            Double normFactor = max == 0 ? 1.0 : (1.0 / Math.Sqrt(max));
            foreach (var pair in ordered)
            {
                var name = names.GetItemOrDefault(pair.Key).ToString();
                if (string.IsNullOrEmpty(name))
                    name = $"f{pair.Key}";
                yield return new KeyValuePair<string, Double>(name, Math.Sqrt(pair.Value) * normFactor);
            }
        }
///<inheritdoc/>
public IList<KeyValuePair<string, object>> GetSummaryInKeyValuePairs(RoleMappedSchema schema)
{
List<KeyValuePair<string, object>> results = new List<KeyValuePair<string, object>>();
var ordered = GetSortedFeatureGains(schema);
foreach (var pair in ordered)
results.Add(new KeyValuePair<string, object>(pair.Key, pair.Value));
return results;
}
        /// <summary>
        /// Writes a C# representation of the ensemble: one expression per tree assigned to a
        /// local "treeOutputN", followed by their sum as "output".
        /// </summary>
        private void SaveEnsembleAsCode(TextWriter writer, RoleMappedSchema schema)
        {
            Host.AssertValueOrNull(schema);
            var names = default(VBuffer<ReadOnlyMemory<char>>);
            MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, NumFeatures, ref names);
            int i = 0;
            foreach (RegressionTree tree in TrainedEnsemble.Trees)
            {
                writer.Write("double treeOutput{0}=", i);
                SaveTreeAsCode(tree, writer, ref names);
                writer.Write(";\n");
                i++;
            }
            // Sum the per-tree outputs into the final score.
            writer.Write("double output = ");
            for (int j = 0; j < i; j++)
                writer.Write((j > 0 ? "+" : "") + "treeOutput" + j);
            writer.Write(";");
        }
        /// <summary>
        /// Converts a single tree to C# source by starting the recursive
        /// <see cref="ToCSharp"/> conversion at the root node (index 0).
        /// </summary>
        private void SaveTreeAsCode(RegressionTree tree, TextWriter writer, ref VBuffer<ReadOnlyMemory<char>> names)
        {
            ToCSharp(tree, writer, 0, ref names);
        }
        // Recursively converts the subtree rooted at 'node' into a nested C# ternary expression.
        // A negative node index encodes a leaf; its leaf index is recovered with the bitwise
        // complement (~node), as used in the LeafValue call below.
        private void ToCSharp(RegressionTree tree, TextWriter writer, int node, ref VBuffer<ReadOnlyMemory<char>> names)
        {
            if (node < 0)
            {
                // Leaf: emit the leaf value in a round-trippable floating-point format.
                writer.Write(FloatUtils.ToRoundTripString(tree.LeafValue(~node)));
                //_output[~node].ToString());
            }
            else
            {
                // Interior node: use the slot name if available, otherwise a synthetic "f<index>" name.
                var name = names.GetItemOrDefault(tree.SplitFeature(node)).ToString();
                if (string.IsNullOrEmpty(name))
                    name = $"f{tree.SplitFeature(node)}";
                // Emit "((feature > threshold) ? <gt-subtree> : <lte-subtree>)".
                writer.Write("(({0} > {1}) ? ", name, FloatUtils.ToRoundTripString(tree.RawThreshold(node)));
                ToCSharp(tree, writer, tree.GetGtChildForNode(node), ref names);
                writer.Write(" : ");
                ToCSharp(tree, writer, tree.GetLteChildForNode(node), ref names);
                writer.Write(")");
            }
        }
        /// <summary>
        /// Fills <paramref name="weights"/> with per-feature importances: the square root
        /// of each feature's accumulated gain, normalized so the largest value is 1.
        /// </summary>
        public void GetFeatureWeights(ref VBuffer<Float> weights)
        {
            var numFeatures = Math.Max(NumFeatures, MaxSplitFeatIdx + 1);
            FeatureToGainMap gainMap = new FeatureToGainMap(TrainedEnsemble.Trees.ToList(), normalize: true);
            // If there are no trees or no splits, there are no gains.
            if (gainMap.Count == 0)
            {
                // Produce an empty (zero-count) vector, reusing the caller's existing buffers.
                weights = new VBuffer<Float>(numFeatures, 0, weights.Values, weights.Indices);
                return;
            }
            // Scale by 1/sqrt(max gain) so sqrt(gain) * normFactor peaks at 1.
            Double max = gainMap.Values.Max();
            Double normFactor = max == 0 ? 1.0 : (1.0 / Math.Sqrt(max));
            var bldr = new BufferBuilder<Float>(R4Adder.Instance);
            bldr.Reset(numFeatures, false);
            foreach (var pair in gainMap)
                bldr.AddFeature(pair.Key, (Float)(Math.Sqrt(pair.Value) * normFactor));
            bldr.GetResult(ref weights);
        }
private static int FindMaxFeatureIndex(Ensemble ensemble)
{
int ifeatMax = 0;
for (int i = 0; i < ensemble.NumTrees; i++)
{
var tree = ensemble.GetTreeAt(i);
for (int n = 0; n < tree.NumNodes; n++)
{
int ifeat = tree.SplitFeature(n);
if (ifeat > ifeatMax)
ifeatMax = ifeat;
}
}
return ifeatMax;
}
public ITree[] GetTrees()
{
return TrainedEnsemble.Trees.Select(k => new Tree(k)).ToArray();
}
        /// <summary>
        /// Returns the output value of leaf <paramref name="leafId"/> in tree
        /// <paramref name="treeId"/>, narrowed from the ensemble's double precision.
        /// </summary>
        public Float GetLeafValue(int treeId, int leafId)
        {
            return (Float)TrainedEnsemble.GetTreeAt(treeId).LeafValue(leafId);
        }
        /// <summary>
        /// Returns the leaf node in the requested tree for the given feature vector, and populates 'path' with the list of
        /// internal nodes in the path from the root to that leaf. If 'path' is null a new list is initialized. All elements
        /// in 'path' are cleared before filling in the current path nodes.
        /// </summary>
        /// <returns>The index of the leaf reached by <paramref name="features"/> in tree <paramref name="treeId"/>.</returns>
        public int GetLeaf(int treeId, ref VBuffer<Float> features, ref List<int> path)
        {
            // Delegates directly to the underlying regression tree.
            return TrainedEnsemble.GetTreeAt(treeId).GetLeaf(ref features, ref path);
        }
        /// <summary>
        /// Builds a single-row view exposing the per-feature gains as a "Gains" vector
        /// column, with the feature names attached as slot-name metadata.
        /// </summary>
        public IRow GetSummaryIRowOrNull(RoleMappedSchema schema)
        {
            var names = default(VBuffer<ReadOnlyMemory<char>>);
            MetadataUtils.GetSlotNames(schema, RoleMappedSchema.ColumnRole.Feature, NumFeatures, ref names);
            // The feature names become the slot-name metadata of the gains column.
            var slotNamesCol = RowColumnUtils.GetColumn(MetadataUtils.Kinds.SlotNames,
                new VectorType(TextType.Instance, NumFeatures), ref names);
            var slotNamesRow = RowColumnUtils.GetRow(null, slotNamesCol);
            var weights = default(VBuffer<Single>);
            GetFeatureWeights(ref weights);
            return RowColumnUtils.GetRow(null, RowColumnUtils.GetColumn("Gains", new VectorType(NumberType.R4, NumFeatures), ref weights, slotNamesRow));
        }
        /// <summary>
        /// This predictor exposes no training-statistics row.
        /// </summary>
        /// <returns>Always null.</returns>
        public IRow GetStatsIRowOrNull(RoleMappedSchema schema)
        {
            return null;
        }
        /// <summary>
        /// Adapter exposing a <see cref="RegressionTree"/> through the tree-inspection interface.
        /// </summary>
        private sealed class Tree : ITree<VBuffer<Float>>
        {
            // The wrapped tree; all members simply delegate to it.
            private readonly RegressionTree _regTree;
            public Tree(RegressionTree regTree)
            {
                _regTree = regTree;
            }
            public int[] GtChild => _regTree.GtChild;
            public int[] LteChild => _regTree.LteChild;
            public int NumNodes => _regTree.NumNodes;
            public int NumLeaves => _regTree.NumLeaves;
            public int GetLeaf(ref VBuffer<Float> feat)
            {
                return _regTree.GetLeaf(ref feat);
            }
            /// <summary>
            /// Builds a key/value description of a node. For a leaf only the leaf value is
            /// reported; for an interior node the split name (when feature names are
            /// available), the threshold, and any optional recorded statistics are reported.
            /// </summary>
            public INode GetNode(int nodeId, bool isLeaf, IEnumerable<string> featuresNames = null)
            {
                var keyValues = new Dictionary<string, object>();
                if (isLeaf)
                {
                    keyValues.Add(NodeKeys.LeafValue, _regTree.LeafValue(nodeId));
                }
                else
                {
                    if (featuresNames != null)
                    {
                        // Split names can only be resolved through a FeatureNameCollection.
                        if (featuresNames is FeatureNameCollection features)
                        {
                            if (_regTree.CategoricalSplit[nodeId])
                            {
                                // Categorical splits test several features; join their names.
                                string featureList = string.Join(" OR \n",
                                    _regTree.CategoricalSplitFeatures[nodeId].Select(feature => features[feature]));
                                keyValues.Add(NodeKeys.SplitName, featureList);
                            }
                            else
                                keyValues.Add(NodeKeys.SplitName, features[_regTree.SplitFeature(nodeId)]);
                        }
                    }
                    keyValues.Add(NodeKeys.Threshold, string.Format("<= {0}", _regTree.RawThreshold(nodeId)));
                    // The statistics below are optional and only present on some trained trees.
                    if (_regTree.SplitGains != null)
                        keyValues.Add(NodeKeys.SplitGain, _regTree.SplitGains[nodeId]);
                    if (_regTree.GainPValues != null)
                        keyValues.Add(NodeKeys.GainValue, _regTree.GainPValues[nodeId]);
                    if (_regTree.PreviousLeafValues != null)
                        keyValues.Add(NodeKeys.PreviousLeafValue, _regTree.PreviousLeafValues[nodeId]);
                }
                return new TreeNode(keyValues);
            }
            public double GetLeafValue(int leafId)
            {
                return _regTree.LeafValue(leafId);
            }
        }
private sealed class TreeNode : INode
{
private readonly Dictionary<string, object> _keyValues;
public TreeNode(Dictionary<string, object> keyValues)
{
_keyValues = keyValues;
}
public Dictionary<string, object> KeyValues { get { return _keyValues; } }
}
}
}
| 47.683304 | 210 | 0.514125 | [
"MIT"
] | forcesk/machinelearning | src/Microsoft.ML.FastTree/FastTree.cs | 163,363 | C# |
/**
* @file TestSuite.cs
* @author Longwei Lai<[email protected]>
* @date 2016/01/25
* @version 1.0
*
* @brief
*/
using System;
using System.Collections.Generic;
using System.Text;
using llbc;
using System.Reflection;
namespace csllbc_testsuite
{
    /// <summary>
    /// Entry point of the csllbc test suite. Exactly one test case is selected below
    /// (uncomment the desired line) and executed with the command-line arguments.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            // Initialize the llbc library for this assembly before any test case runs.
            LibIniter.Init(Assembly.GetExecutingAssembly());
            ITestCase testCase = null;
            // Common testcases:
            testCase = new TestCase_Com_LibConfig();
            // testCase = new TestCase_Com_SafeConsole();
            // Core testcases.
            // testCase = new TestCase_Core_Log_Logger();
            // testCase = new TestCase_Core_Config_Ini();
            // Communication testcases:
            // testCase = new TestCase_Comm_Timer();
            // testCase = new TestCase_Comm_Service();
            testCase.Run(args);
            // Tear the library down once the selected test case finishes.
            LibIniter.Destroy();
        }
    }
}
| 22 | 60 | 0.580579 | [
"MIT"
] | caochunxi/llbc | wrap/csllbc/testsuite/TestSuite.cs | 970 | C# |
using Domain.Entities;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Persistence.Configuration
{
    /// <summary>
    /// EF Core entity type configuration for <see cref="Telefono"/>: maps the "Telefono"
    /// table, which holds phone numbers for people and companies.
    /// </summary>
    public class ConfiguracionTelefono : IEntityTypeConfiguration<Telefono>
    {
        public void Configure(EntityTypeBuilder<Telefono> entity)
        {
            // Primary key, table name, and table-level database comment.
            entity.HasKey(e => e.IdTelefono);
            entity.ToTable("Telefono");
            entity.HasComment("Todos los Telefonos de personas y empresas");
            // A (country code, number) pair must be unique across the table.
            entity.HasIndex(e => new { e.CodigoPais, e.Numero }, "IX_NoDuplicadoCodigoPaisNumero")
                .IsUnique();
            entity.Property(e => e.IdTelefono).HasComment("Identificador unico de la tabla telefono");
            // Telephone country code, e.g. dialing prefix (max 4 non-Unicode chars).
            entity.Property(e => e.CodigoPais)
                .IsRequired()
                .HasMaxLength(4)
                .IsUnicode(false)
                .HasComment("Codigo Telefonico del Pais");
            entity.Property(e => e.EsHabilitado).HasComment("Si el registro esta habilitado para trabajar");
            // Single fixed-length character status flag.
            entity.Property(e => e.Estatus)
                .IsRequired()
                .HasMaxLength(1)
                .IsUnicode(false)
                .IsFixedLength(true)
                .HasComment("Estatus del registro");
            // Audit timestamps.
            entity.Property(e => e.FechaCreacion)
                .HasColumnType("datetime")
                .HasComment("Fecha de creacion del registro");
            entity.Property(e => e.FechaModificacion)
                .HasColumnType("datetime")
                .HasComment("Ultima fecha de Modificacion del registro");
            // The phone number itself (max 15 digits, non-Unicode).
            entity.Property(e => e.Numero)
                .IsRequired()
                .HasMaxLength(15)
                .IsUnicode(false)
                .HasComment("Numero telefonico");
            // Phone kind, e.g. mobile vs. landline.
            entity.Property(e => e.TipoTelefono)
                .IsRequired()
                .HasMaxLength(50)
                .IsUnicode(false)
                .HasComment("Celular o Fijo");
            // Audit user columns.
            entity.Property(e => e.UsuarioCreacion)
                .IsRequired()
                .HasMaxLength(50)
                .IsUnicode(false)
                .HasComment("Usuario que creo el registro");
            entity.Property(e => e.UsuarioModificacion)
                .IsRequired()
                .HasMaxLength(50)
                .IsUnicode(false)
                .HasComment("Ultimo usuario que modifico el registro");
        }
    }
}
| 33.5 | 108 | 0.566379 | [
"MIT"
] | NSysX/NSysWeb | NSysWeb/src/Infraestructure/Persistence/Configuration/ConfiguracionTelefono.cs | 2,548 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using Azure.Core;
using System;
using System.Reflection;
using NUnit.Framework;
using Azure.Core.TestFramework;
using Azure.Identity.Tests.Mock;
using System.Threading.Tasks;
namespace Azure.Identity.Tests
{
    /// <summary>
    /// Tests for <see cref="EnvironmentCredential"/>: construction from environment
    /// variables and error behavior when credentials are missing or authentication fails.
    /// </summary>
    public class EnvironmentCredentialProviderTests : ClientTestBase
    {
        public EnvironmentCredentialProviderTests(bool isAsync) : base(isAsync)
        {
        }
        /// <summary>
        /// Setting AZURE_CLIENT_ID/AZURE_TENANT_ID/AZURE_CLIENT_SECRET should make
        /// EnvironmentCredential construct a ClientSecretCredential with those values.
        /// NonParallelizable because it mutates process-wide environment variables.
        /// </summary>
        [NonParallelizable]
        [Test]
        public void CredentialConstructionClientSecret()
        {
            // Back up the current values so they can be restored in the finally block.
            string clientIdBackup = Environment.GetEnvironmentVariable("AZURE_CLIENT_ID");
            string tenantIdBackup = Environment.GetEnvironmentVariable("AZURE_TENANT_ID");
            string clientSecretBackup = Environment.GetEnvironmentVariable("AZURE_CLIENT_SECRET");
            try
            {
                Environment.SetEnvironmentVariable("AZURE_CLIENT_ID", "mockclientid");
                Environment.SetEnvironmentVariable("AZURE_TENANT_ID", "mocktenantid");
                Environment.SetEnvironmentVariable("AZURE_CLIENT_SECRET", "mockclientsecret");
                var provider = new EnvironmentCredential();
                ClientSecretCredential cred = provider.Credential as ClientSecretCredential;
                Assert.NotNull(cred);
                Assert.AreEqual("mockclientid", cred.ClientId);
                Assert.AreEqual("mocktenantid", cred.TenantId);
                Assert.AreEqual("mockclientsecret", cred.ClientSecret);
            }
            finally
            {
                Environment.SetEnvironmentVariable("AZURE_CLIENT_ID", clientIdBackup);
                Environment.SetEnvironmentVariable("AZURE_TENANT_ID", tenantIdBackup);
                Environment.SetEnvironmentVariable("AZURE_CLIENT_SECRET", clientSecretBackup);
            }
        }
        /// <summary>
        /// Setting AZURE_CLIENT_CERTIFICATE_PATH (with client/tenant ids) should make
        /// EnvironmentCredential construct a ClientCertificateCredential backed by a
        /// certificate-from-file provider pointing at that path.
        /// </summary>
        [NonParallelizable]
        [Test]
        public void CredentialConstructionClientCertificate()
        {
            // Back up the current values so they can be restored in the finally block.
            string clientIdBackup = Environment.GetEnvironmentVariable("AZURE_CLIENT_ID");
            string tenantIdBackup = Environment.GetEnvironmentVariable("AZURE_TENANT_ID");
            string clientCertificateLocationBackup = Environment.GetEnvironmentVariable("AZURE_CLIENT_CERTIFICATE_PATH");
            try
            {
                Environment.SetEnvironmentVariable("AZURE_CLIENT_ID", "mockclientid");
                Environment.SetEnvironmentVariable("AZURE_TENANT_ID", "mocktenantid");
                Environment.SetEnvironmentVariable("AZURE_CLIENT_CERTIFICATE_PATH", "mockcertificatepath");
                var provider = new EnvironmentCredential();
                var cred = provider.Credential as ClientCertificateCredential;
                Assert.NotNull(cred);
                Assert.AreEqual("mockclientid", cred.ClientId);
                Assert.AreEqual("mocktenantid", cred.TenantId);
                var certProvider = cred.ClientCertificateProvider as ClientCertificateCredential.X509Certificate2FromFileProvider;
                Assert.NotNull(certProvider);
                Assert.AreEqual("mockcertificatepath", certProvider.CertificatePath);
            }
            finally
            {
                Environment.SetEnvironmentVariable("AZURE_CLIENT_ID", clientIdBackup);
                Environment.SetEnvironmentVariable("AZURE_TENANT_ID", tenantIdBackup);
                Environment.SetEnvironmentVariable("AZURE_CLIENT_CERTIFICATE_PATH", clientCertificateLocationBackup);
            }
        }
        /// <summary>
        /// With no inner credential configured, GetTokenAsync should surface
        /// CredentialUnavailableException.
        /// </summary>
        [Test]
        public void EnvironmentCredentialUnavailableException()
        {
            var credential = InstrumentClient(new EnvironmentCredential(CredentialPipeline.GetInstance(null), null));
            Assert.ThrowsAsync<CredentialUnavailableException>(async () => await credential.GetTokenAsync(new TokenRequestContext(MockScopes.Default)));
        }
        /// <summary>
        /// A failure from the underlying MSAL client should be wrapped in
        /// AuthenticationFailedException with the original exception preserved as InnerException.
        /// </summary>
        [Test]
        public async Task EnvironmentCredentialAuthenticationFailedException()
        {
            string expectedInnerExMessage = Guid.NewGuid().ToString();
            var mockMsalClient = new MockMsalConfidentialClient(new MockClientException(expectedInnerExMessage));
            ClientSecretCredential innerCred = new ClientSecretCredential(Guid.NewGuid().ToString(), Guid.NewGuid().ToString(), Guid.NewGuid().ToString(), default, default, mockMsalClient);
            var credential = InstrumentClient(new EnvironmentCredential(CredentialPipeline.GetInstance(null), innerCred));
            var ex = Assert.ThrowsAsync<AuthenticationFailedException>(async () => await credential.GetTokenAsync(new TokenRequestContext(MockScopes.Default)));
            Assert.IsInstanceOf(typeof(MockClientException), ex.InnerException);
            Assert.AreEqual(expectedInnerExMessage, ex.InnerException.Message);
            await Task.CompletedTask;
        }
    }
| 42.230769 | 189 | 0.678 | [
"MIT"
] | AWESOME-S-MINDSET/azure-sdk-for-net | sdk/identity/Azure.Identity/tests/EnvironmentCredentialProviderTests.cs | 4,943 | C# |
// <copyright file="Executable.cs" company="WebDriver Committers">
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Security.Permissions;
using System.Text;
using Microsoft.Win32;
using OpenQA.Selenium.Internal;
namespace OpenQA.Selenium.Firefox.Internal
{
/// <summary>
/// Represents the executable file for Firefox.
/// </summary>
    internal class Executable
    {
        // Path discovered by probing default install locations / PATH; null when the
        // user supplied an explicit path.
        private readonly string binaryInDefaultLocationForPlatform;
        // The resolved path to an existing Firefox binary.
        private string binaryLocation;
        /// <summary>
        /// Initializes a new instance of the <see cref="Executable"/> class.
        /// </summary>
        /// <param name="userSpecifiedBinaryPath">The path and file name to the Firefox executable.</param>
        /// <exception cref="WebDriverException">
        /// Thrown when the user-specified path does not exist, or when no Firefox binary
        /// can be located on the system.
        /// </exception>
        public Executable(string userSpecifiedBinaryPath)
        {
            if (!string.IsNullOrEmpty(userSpecifiedBinaryPath))
            {
                // It should exist and be a file.
                if (File.Exists(userSpecifiedBinaryPath))
                {
                    this.binaryLocation = userSpecifiedBinaryPath;
                    return;
                }
                throw new WebDriverException(
                    "Specified firefox binary location does not exist or is not a real file: " +
                    userSpecifiedBinaryPath);
            }
            else
            {
                // No explicit path: probe the platform's default install locations and PATH.
                this.binaryInDefaultLocationForPlatform = LocateFirefoxBinaryFromPlatform();
            }
            if (this.binaryInDefaultLocationForPlatform != null && File.Exists(this.binaryInDefaultLocationForPlatform))
            {
                this.binaryLocation = this.binaryInDefaultLocationForPlatform;
                return;
            }
            throw new WebDriverException("Cannot find Firefox binary in PATH or default install locations. " +
                "Make sure Firefox is installed. OS appears to be: " + Platform.CurrentPlatform.ToString());
        }
        /// <summary>
        /// Gets the full path to the executable.
        /// </summary>
        public string ExecutablePath
        {
            get { return this.binaryLocation; }
        }
        /// <summary>
        /// Sets the library path for the Firefox executable environment.
        /// </summary>
        /// <param name="builder">The <see cref="Process"/> used to execute the binary.</param>
        [SecurityPermission(SecurityAction.Demand)]
        public void SetLibraryPath(Process builder)
        {
            string propertyName = GetLibraryPathPropertyName();
            StringBuilder libraryPath = new StringBuilder();
            // If we have an env var set for the path, use it.
            string env = GetEnvironmentVariable(propertyName, null);
            if (env != null)
            {
                libraryPath.Append(env).Append(Path.PathSeparator);
            }
            // Check our extra env vars for the same var, and use it too.
            if (builder.StartInfo.EnvironmentVariables.ContainsKey(propertyName))
            {
                libraryPath.Append(builder.StartInfo.EnvironmentVariables[propertyName]).Append(Path.PathSeparator);
            }
            // Last, add the contents of the specified system property, defaulting to the binary's path.
            // On Snow Leopard, beware of problems the sqlite library
            string firefoxLibraryPath = Path.GetFullPath(this.binaryLocation);
            if (Platform.CurrentPlatform.IsPlatformType(PlatformType.Mac) && Platform.CurrentPlatform.MinorVersion > 5)
            {
                libraryPath.Append(Path.PathSeparator);
            }
            else
            {
                // Insert the Firefox library path and the path separator at the beginning
                // of the path.
                libraryPath.Insert(0, Path.PathSeparator).Insert(0, firefoxLibraryPath);
            }
            // Add the library path to the builder.
            if (builder.StartInfo.EnvironmentVariables.ContainsKey(propertyName))
            {
                builder.StartInfo.EnvironmentVariables[propertyName] = libraryPath.ToString();
            }
            else
            {
                builder.StartInfo.EnvironmentVariables.Add(propertyName, libraryPath.ToString());
            }
        }
        /// <summary>
        /// Locates the Firefox binary by platform.
        /// </summary>
        /// <returns>The full path to the binary.</returns>
        [SecurityPermission(SecurityAction.Demand)]
        private static string LocateFirefoxBinaryFromPlatform()
        {
            string binary = string.Empty;
            if (Platform.CurrentPlatform.IsPlatformType(PlatformType.Windows))
            {
#if !NETCOREAPP2_0 && !NETSTANDARD2_0
                // NOTE: This code is legacy, and will be removed. It will not be
                // fixed for the .NET Core case.
                // Look first in HKEY_LOCAL_MACHINE, then in HKEY_CURRENT_USER
                // if it's not found there. If it's still not found, look in
                // the default install location (C:\Program Files\Mozilla Firefox).
                string firefoxRegistryKey = @"SOFTWARE\Mozilla\Mozilla Firefox";
                // NOTE(review): mozillaKey is never disposed; RegistryKey implements
                // IDisposable, so a using block would be preferable — confirm before changing
                // this legacy path.
                RegistryKey mozillaKey = Registry.LocalMachine.OpenSubKey(firefoxRegistryKey);
                if (mozillaKey == null)
                {
                    mozillaKey = Registry.CurrentUser.OpenSubKey(firefoxRegistryKey);
                }
                if (mozillaKey != null)
                {
                    binary = GetExecutablePathUsingRegistry(mozillaKey);
                }
                else
                {
#endif
                // NOTE: Can't use Environment.SpecialFolder.ProgramFilesX86, because .NET 3.5
                // doesn't have that member of the enum.
                string[] windowsDefaultInstallLocations = new string[]
                {
                    Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), "Mozilla Firefox"),
                    Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles) + " (x86)", "Mozilla Firefox")
                };
                binary = GetExecutablePathUsingDefaultInstallLocations(windowsDefaultInstallLocations, "Firefox.exe");
#if !NETCOREAPP2_0 && !NETSTANDARD2_0
                }
#endif
            }
            else
            {
                string[] macDefaultInstallLocations = new string[]
                {
                    "/Applications/Firefox.app/Contents/MacOS",
                    string.Format(CultureInfo.InvariantCulture, "/Users/{0}/Applications/Firefox.app/Contents/MacOS", Environment.UserName)
                };
                binary = GetExecutablePathUsingDefaultInstallLocations(macDefaultInstallLocations, "firefox-bin");
                if (string.IsNullOrEmpty(binary))
                {
                    // Use "which firefox" for non-Windows OS, and non-Mac OS where
                    // Firefox is installed in a non-default location.
                    using (Process proc = new Process())
                    {
                        proc.StartInfo.FileName = "which";
                        proc.StartInfo.Arguments = "firefox";
                        proc.StartInfo.CreateNoWindow = true;
                        proc.StartInfo.RedirectStandardOutput = true;
                        proc.StartInfo.UseShellExecute = false;
                        proc.Start();
                        proc.WaitForExit();
                        binary = proc.StandardOutput.ReadToEnd().Trim();
                    }
                }
            }
            if (binary != null && File.Exists(binary))
            {
                return binary;
            }
            // Didn't find binary in any of the default install locations, so look
            // at directories on the user's PATH environment variable.
            return FindBinary(new string[] { "firefox3", "firefox" });
        }
#if !NETCOREAPP2_0 && !NETSTANDARD2_0
        private static string GetExecutablePathUsingRegistry(RegistryKey mozillaKey)
        {
            // NOTE: This code is legacy, and will be removed. It will not be
            // fixed for the .NET Core case.
            string currentVersion = (string)mozillaKey.GetValue("CurrentVersion");
            if (string.IsNullOrEmpty(currentVersion))
            {
                throw new WebDriverException("Unable to determine the current version of FireFox using the registry, please make sure you have installed FireFox correctly");
            }
            // NOTE(review): currentMain is never disposed (RegistryKey is IDisposable);
            // consider a using block — confirm before changing this legacy path.
            RegistryKey currentMain = mozillaKey.OpenSubKey(string.Format(CultureInfo.InvariantCulture, @"{0}\Main", currentVersion));
            if (currentMain == null)
            {
                throw new WebDriverException(
                    "Unable to determine the current version of FireFox using the registry, please make sure you have installed FireFox correctly");
            }
            string path = (string)currentMain.GetValue("PathToExe");
            if (!File.Exists(path))
            {
                throw new WebDriverException(
                    "FireFox executable listed in the registry does not exist, please make sure you have installed FireFox correctly");
            }
            return path;
        }
#endif
        // Returns the first existing <location>/<exeName> among the candidate
        // directories, or null when none exists.
        private static string GetExecutablePathUsingDefaultInstallLocations(string[] defaultInstallLocations, string exeName)
        {
            foreach (string defaultInstallLocation in defaultInstallLocations)
            {
                string fullPath = Path.Combine(defaultInstallLocation, exeName);
                if (File.Exists(fullPath))
                {
                    return fullPath;
                }
            }
            return null;
        }
        /// <summary>
        /// Retrieves an environment variable
        /// </summary>
        /// <param name="name">Name of the variable.</param>
        /// <param name="defaultValue">Default value of the variable.</param>
        /// <returns>The value of the variable. If no variable with that name is set, returns the default.</returns>
        private static string GetEnvironmentVariable(string name, string defaultValue)
        {
            string value = Environment.GetEnvironmentVariable(name);
            if (string.IsNullOrEmpty(value))
            {
                value = defaultValue;
            }
            return value;
        }
        /// <summary>
        /// Retrieves the platform specific environment property name which contains the library path.
        /// </summary>
        /// <returns>The platform specific environment property name which contains the library path.</returns>
        private static string GetLibraryPathPropertyName()
        {
            // Default to the Linux/Unix convention; overridden for Windows and macOS below.
            string libraryPropertyPathName = "LD_LIBRARY_PATH";
            if (Platform.CurrentPlatform.IsPlatformType(PlatformType.Windows))
            {
                libraryPropertyPathName = "PATH";
            }
            else if (Platform.CurrentPlatform.IsPlatformType(PlatformType.Mac))
            {
                libraryPropertyPathName = "DYLD_LIBRARY_PATH";
            }
            return libraryPropertyPathName;
        }
        /// <summary>
        /// Walk a PATH to locate binaries with a specified name. Binaries will be searched for in the
        /// order they are provided.
        /// </summary>
        /// <param name="binaryNames">The binary names to search for.</param>
        /// <returns>The first binary found matching that name.</returns>
        private static string FindBinary(string[] binaryNames)
        {
            foreach (string binaryName in binaryNames)
            {
                string exe = binaryName;
                if (Platform.CurrentPlatform.IsPlatformType(PlatformType.Windows))
                {
                    // Windows executables carry an .exe extension.
                    exe += ".exe";
                }
                string path = FileUtilities.FindFile(exe);
                if (!string.IsNullOrEmpty(path))
                {
                    return Path.Combine(path, exe);
                }
            }
            return null;
        }
    }
| 41.501587 | 173 | 0.583952 | [
"Apache-2.0"
] | Acidburn0zzz/selenium | dotnet/src/webdriver/Firefox/Internal/Executable.cs | 13,073 | C# |
using ConversionHelper;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using BEPUphysics.Entities;
namespace GettingStartedDemo
{
/// <summary>
/// Component that draws a model following the position and orientation of a BEPUphysics entity.
/// </summary>
    public class EntityModel : DrawableGameComponent
    {
        /// <summary>
        /// Entity that this model follows.
        /// </summary>
        Entity entity;
        // Graphical model drawn at the entity's pose.
        Model model;
        /// <summary>
        /// Base transformation to apply to the model.
        /// </summary>
        public BEPUutilities.Matrix Transform;
        // Cached absolute bone transforms, refreshed every Draw call.
        Matrix[] boneTransforms;
        /// <summary>
        /// Creates a new EntityModel.
        /// </summary>
        /// <param name="entity">Entity to attach the graphical representation to.</param>
        /// <param name="model">Graphical representation to use for the entity.</param>
        /// <param name="transform">Base transformation to apply to the model before moving to the entity.</param>
        /// <param name="game">Game to which this component will belong.</param>
        public EntityModel(Entity entity, Model model, BEPUutilities.Matrix transform, Game game)
            : base(game)
        {
            this.entity = entity;
            this.model = model;
            this.Transform = transform;
            //Collect any bone transformations in the model itself.
            //The default cube model doesn't have any, but this allows the EntityModel to work with more complicated shapes.
            boneTransforms = new Matrix[model.Bones.Count];
            foreach (ModelMesh mesh in model.Meshes)
            {
                foreach (BasicEffect effect in mesh.Effects)
                {
                    effect.EnableDefaultLighting();
                }
            }
        }
        public override void Draw(GameTime gameTime)
        {
            //Notice that the entity's worldTransform property is being accessed here.
            //This property is returns a rigid transformation representing the orientation
            //and translation of the entity combined.
            //There are a variety of properties available in the entity, try looking around
            //in the list to familiarize yourself with it.
            Matrix worldMatrix = MathConverter.Convert(Transform * entity.WorldTransform);
            model.CopyAbsoluteBoneTransformsTo(boneTransforms);
            foreach (ModelMesh mesh in model.Meshes)
            {
                foreach (BasicEffect effect in mesh.Effects)
                {
                    // Combine each mesh's bone transform with the entity's world transform,
                    // and pull view/projection from the game's camera.
                    effect.World = boneTransforms[mesh.ParentBone.Index] * worldMatrix;
                    effect.View = MathConverter.Convert((Game as GettingStartedGame).Camera.ViewMatrix);
                    effect.Projection = MathConverter.Convert((Game as GettingStartedGame).Camera.ProjectionMatrix);
                }
                mesh.Draw();
            }
            base.Draw(gameTime);
        }
    }
}
| 39.894737 | 124 | 0.60686 | [
"Apache-2.0"
] | KakCAT/bepuphysics1 | Documentation/Isolated Demos/GettingStartedDemo/EntityModel.cs | 3,034 | C# |
namespace Charlotte
{
    partial class TestFortewaveWin
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;
        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }
        #region Windows フォーム デザイナーで生成されたコード
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.txtSend = new System.Windows.Forms.TextBox();
            this.txtRecv = new System.Windows.Forms.TextBox();
            this.lblStatus = new System.Windows.Forms.Label();
            this.SuspendLayout();
            // 
            // txtSend
            // 
            this.txtSend.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
            | System.Windows.Forms.AnchorStyles.Right)));
            this.txtSend.Location = new System.Drawing.Point(12, 12);
            this.txtSend.Multiline = true;
            this.txtSend.Name = "txtSend";
            this.txtSend.ScrollBars = System.Windows.Forms.ScrollBars.Vertical;
            this.txtSend.Size = new System.Drawing.Size(630, 66);
            this.txtSend.TabIndex = 0;
            this.txtSend.Text = "ここにメッセージを書いて下さい。\r\n1行を1メッセージとして送ります。\r\n送るには ctrl+enter を押して下さい。";
            this.txtSend.TextChanged += new System.EventHandler(this.txtSend_TextChanged);
            this.txtSend.KeyPress += new System.Windows.Forms.KeyPressEventHandler(this.txtSend_KeyPress);
            // 
            // txtRecv
            // 
            this.txtRecv.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
            | System.Windows.Forms.AnchorStyles.Left)
            | System.Windows.Forms.AnchorStyles.Right)));
            this.txtRecv.Location = new System.Drawing.Point(12, 104);
            this.txtRecv.Multiline = true;
            this.txtRecv.Name = "txtRecv";
            this.txtRecv.ScrollBars = System.Windows.Forms.ScrollBars.Both;
            this.txtRecv.Size = new System.Drawing.Size(630, 385);
            this.txtRecv.TabIndex = 2;
            this.txtRecv.Text = "受信メッセージ";
            this.txtRecv.TextChanged += new System.EventHandler(this.txtRecv_TextChanged);
            // 
            // lblStatus
            // 
            this.lblStatus.AutoSize = true;
            this.lblStatus.Location = new System.Drawing.Point(12, 81);
            this.lblStatus.Name = "lblStatus";
            this.lblStatus.Size = new System.Drawing.Size(61, 20);
            this.lblStatus.TabIndex = 1;
            this.lblStatus.Text = "送信可能";
            // 
            // TestFortewaveWin
            // 
            this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 20F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(654, 501);
            this.Controls.Add(this.lblStatus);
            this.Controls.Add(this.txtRecv);
            this.Controls.Add(this.txtSend);
            this.Font = new System.Drawing.Font("メイリオ", 9.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(128)));
            this.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5);
            this.Name = "TestFortewaveWin";
            this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Show;
            this.Text = "TestFortewaveWin";
            this.FormClosed += new System.Windows.Forms.FormClosedEventHandler(this.TestFortewaveWin_FormClosed);
            this.Load += new System.EventHandler(this.TestFortewaveWin_Load);
            this.ResumeLayout(false);
            this.PerformLayout();
        }
        #endregion
        private System.Windows.Forms.TextBox txtSend;
        private System.Windows.Forms.TextBox txtRecv;
        private System.Windows.Forms.Label lblStatus;
    }
}
| 36.85 | 147 | 0.710719 | [
"MIT"
] | stackprobe/UnrealRemoco | UnrealClient/UnrealClient/TestFortewaveWin.Designer.cs | 4,073 | C# |
using System;
using UnityEngine;
using UnityEngine.Playables;
using UnityEngine.Timeline;
namespace Timeline.Samples
{
    // A clip for the timeline dilation track.
    [Serializable]
    public class TimeDilationPlayableAsset : PlayableAsset, ITimelineClipAsset
    {
        // Using a template for the playable behaviour will allow any serializable fields on the behaviour
        // to be animated.
        [NoFoldOut]
        public TimeDilationBehaviour template = new TimeDilationBehaviour();
        // Implementation of ITimelineClipAsset, that tells the timeline editor which
        // features this clip supports: extrapolation beyond the clip bounds and
        // blending between overlapping clips.
        public ClipCaps clipCaps
        {
            get { return ClipCaps.Extrapolation | ClipCaps.Blending; }
        }
        // Called to creates a runtime instance of the clip.
        public override Playable CreatePlayable(PlayableGraph graph, GameObject owner)
        {
            // Note that template is passed as a parameter - this
            // creates a clone of the template PlayableBehaviour.
            return ScriptPlayable<TimeDilationBehaviour>.Create(graph, template);
        }
    }
}
| 34.666667 | 106 | 0.682692 | [
"MIT"
] | 2PUEG-VRIK/UnityEscapeGame | 2P-UnityEscapeGame/Library/PackageCache/[email protected]/Samples~/Customization/TimeDilation/TimeDilationPlayableAsset.cs | 1,144 | C# |
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CSharp.Testing;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Testing;
using Microsoft.CodeAnalysis.Testing.Verifiers;
using System.Threading;
using System.Threading.Tasks;
namespace BitwiseToShortCircuitAnalyzer.Test
{
    /// <summary>
    /// Static helpers for verifying a C# analyzer/code-fix pair
    /// (<typeparamref name="TAnalyzer"/> / <typeparamref name="TCodeFix"/>) using the
    /// Roslyn testing framework with the MSTest verifier.
    /// </summary>
    public static partial class CSharpCodeFixVerifier<TAnalyzer, TCodeFix>
        where TAnalyzer : DiagnosticAnalyzer, new()
        where TCodeFix : CodeFixProvider, new()
    {
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.Diagnostic()"/>
        public static DiagnosticResult Diagnostic()
            => CSharpCodeFixVerifier<TAnalyzer, TCodeFix, MSTestVerifier>.Diagnostic();
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.Diagnostic(string)"/>
        public static DiagnosticResult Diagnostic(string diagnosticId)
            => CSharpCodeFixVerifier<TAnalyzer, TCodeFix, MSTestVerifier>.Diagnostic(diagnosticId);
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.Diagnostic(DiagnosticDescriptor)"/>
        public static DiagnosticResult Diagnostic(DiagnosticDescriptor descriptor)
            => CSharpCodeFixVerifier<TAnalyzer, TCodeFix, MSTestVerifier>.Diagnostic(descriptor);
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.VerifyAnalyzerAsync(string, DiagnosticResult[])"/>
        public static async Task VerifyAnalyzerAsync(string source, params DiagnosticResult[] expected)
        {
            // Run only the analyzer (no fix) and compare reported diagnostics.
            var test = new Test
            {
                TestCode = source,
            };
            test.ExpectedDiagnostics.AddRange(expected);
            await test.RunAsync(CancellationToken.None);
        }
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.VerifyCodeFixAsync(string, string)"/>
        public static async Task VerifyCodeFixAsync(string source, string fixedSource)
            => await VerifyCodeFixAsync(source, DiagnosticResult.EmptyDiagnosticResults, fixedSource);
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.VerifyCodeFixAsync(string, DiagnosticResult, string)"/>
        public static async Task VerifyCodeFixAsync(string source, DiagnosticResult expected, string fixedSource)
            => await VerifyCodeFixAsync(source, new[] { expected }, fixedSource);
        /// <inheritdoc cref="CodeFixVerifier{TAnalyzer, TCodeFix, TTest, TVerifier}.VerifyCodeFixAsync(string, DiagnosticResult[], string)"/>
        public static async Task VerifyCodeFixAsync(string source, DiagnosticResult[] expected, string fixedSource)
        {
            // Run the analyzer, apply the code fix, and compare against the fixed source.
            var test = new Test
            {
                TestCode = source,
                FixedCode = fixedSource,
            };
            test.ExpectedDiagnostics.AddRange(expected);
            await test.RunAsync(CancellationToken.None);
        }
    }
} | 44.47541 | 136 | 0.78474 | [
"MIT"
] | electricessence/Bitwise2ShortcutAnalyzer | BitwiseToShortCircuitAnalyzer/BitwiseToShortCircuitAnalyzer.Test/Verifiers/CSharpCodeFixVerifier`2.cs | 2,715 | C# |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Diagnostics;
using Microsoft.Win32.SafeHandles;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;
namespace System.Net
{
    internal static partial class CertificateValidationPal
    {
        // Builds the certificate chain and verifies the standard TLS policy properties
        // (delegates all work to the shared CertificateValidation helper).
        internal static SslPolicyErrors VerifyCertificateProperties(
            SafeDeleteContext? securityContext,
            X509Chain chain,
            X509Certificate2 remoteCertificate,
            bool checkCertName,
            bool isServer,
            string? hostName)
        {
            return CertificateValidation.BuildChainAndVerifyProperties(chain, remoteCertificate, checkCertName, isServer, hostName);
        }
        //
        // Extracts a remote certificate upon request.
        //
        // Returns null when there is no security context or no peer certificate.
        // When retrieveChainCertificates is true, the peer's chain certificates are also
        // copied into chain.ChainPolicy.ExtraStore (allocating the chain on demand).
        private static X509Certificate2? GetRemoteCertificate(SafeDeleteContext? securityContext, bool retrieveChainCertificates, ref X509Chain? chain)
        {
            bool gotReference = false;
            if (securityContext == null)
            {
                return null;
            }
            X509Certificate2? result = null;
            SafeFreeCertContext? remoteContext = null;
            try
            {
                QueryContextRemoteCertificate(securityContext, out remoteContext);
                if (remoteContext != null && !remoteContext.IsInvalid)
                {
                    // Keep the native handle alive while X509Certificate2 duplicates it.
                    remoteContext.DangerousAddRef(ref gotReference);
                    result = new X509Certificate2(remoteContext.DangerousGetHandle());
                }
                if (retrieveChainCertificates)
                {
                    chain ??= new X509Chain();
                    using (SafeSharedX509StackHandle chainStack =
                        Interop.OpenSsl.GetPeerCertificateChain(((SafeDeleteSslContext)securityContext).SslContext))
                    {
                        if (!chainStack.IsInvalid)
                        {
                            int count = Interop.Crypto.GetX509StackFieldCount(chainStack);
                            for (int i = 0; i < count; i++)
                            {
                                IntPtr certPtr = Interop.Crypto.GetX509StackField(chainStack, i);
                                if (certPtr != IntPtr.Zero)
                                {
                                    // X509Certificate2(IntPtr) calls X509_dup, so the reference is appropriately tracked.
                                    X509Certificate2 chainCert = new X509Certificate2(certPtr);
                                    chain.ChainPolicy.ExtraStore.Add(chainCert);
                                }
                            }
                        }
                    }
                }
            }
            catch
            {
                // Don't leak the partially constructed certificate on failure.
                result?.Dispose();
                throw;
            }
            finally
            {
                if (remoteContext != null)
                {
                    if (gotReference)
                    {
                        remoteContext.DangerousRelease();
                    }
                    remoteContext.Dispose();
                }
            }
            if (NetEventSource.Log.IsEnabled()) NetEventSource.Log.RemoteCertificate(result);
            return result;
        }
        //
        // Used only by client SSL code, never returns null.
        //
        // Returns the distinguished names of the certificate authorities the server
        // will accept for client authentication (possibly an empty array).
        internal static string[] GetRequestCertificateAuthorities(SafeDeleteContext securityContext)
        {
            using (SafeSharedX509NameStackHandle names = Interop.Ssl.SslGetClientCAList(((SafeDeleteSslContext)securityContext).SslContext))
            {
                if (names.IsInvalid)
                {
                    return Array.Empty<string>();
                }
                int nameCount = Interop.Crypto.GetX509NameStackFieldCount(names);
                if (nameCount == 0)
                {
                    return Array.Empty<string>();
                }
                string[] clientAuthorityNames = new string[nameCount];
                for (int i = 0; i < nameCount; i++)
                {
                    using (SafeSharedX509NameHandle nameHandle = Interop.Crypto.GetX509NameStackField(names, i))
                    {
                        X500DistinguishedName dn = Interop.Crypto.LoadX500Name(nameHandle);
                        clientAuthorityNames[i] = dn.Name;
                    }
                }
                return clientAuthorityNames;
            }
        }
        static partial void CheckSupportsStore(StoreLocation storeLocation, ref bool hasSupport)
        {
            // There's not currently a LocalMachine\My store on Unix, so don't bother trying
            // and having to deal with the exception.
            //
            // https://github.com/dotnet/runtime/issues/15377 tracks the lack of this store.
            if (storeLocation == StoreLocation.LocalMachine)
                hasSupport = false;
        }
        // Opens the CurrentUser\My store read-only. LocalMachine is rejected earlier by
        // CheckSupportsStore, hence the Debug.Assert.
        private static X509Store OpenStore(StoreLocation storeLocation)
        {
            Debug.Assert(storeLocation == StoreLocation.CurrentUser);
            X509Store store = new X509Store(StoreName.My, storeLocation);
            store.Open(OpenFlags.ReadOnly);
            return store;
        }
        // Fetches the peer certificate from the OpenSSL context.
        // Returns 0 on success and -1 if the certificate could not be retrieved.
        private static int QueryContextRemoteCertificate(SafeDeleteContext securityContext, out SafeFreeCertContext? remoteCertContext)
        {
            remoteCertContext = null;
            try
            {
                SafeX509Handle remoteCertificate = Interop.OpenSsl.GetPeerCertificate(((SafeDeleteSslContext)securityContext).SslContext);
                // Note that cert ownership is transferred to SafeFreeCertContext
                remoteCertContext = new SafeFreeCertContext(remoteCertificate);
                return 0;
            }
            catch
            {
                return -1;
            }
        }
    }
}
| 36.335329 | 151 | 0.529664 | [
"MIT"
] | Ali-YousefiTelori/runtime | src/libraries/System.Net.Security/src/System/Net/CertificateValidationPal.Unix.cs | 6,068 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// A dropped world item. On spawn it "plops" out with a small random arc, then
/// becomes collectable: once the player is inside the attraction radius (or the
/// item is globally attracted) it flies toward the player and is added to the
/// inventory on contact.
/// </summary>
public class Pickup : MonoBehaviour
{
    private Inventory inventory;
    private SpriteRenderer spriteRenderer;
    private BoxCollider2D boxCollider;

    // Inventory slot index for this item; 0 means "no item" and the pickup destroys itself.
    public int itemIndex;
    public string itemName;

    private Transform player;
    private PlayerMovement playerStats;

    // Current speed while flying toward the player (grows by playerGravity each physics step).
    public float itemSpeed = 1f;
    private float maxXVelocity = 0.03f;
    private float maxYVelocity = 0.08f;
    private float xVelocity;
    private float yVelocity;
    public float overrideXVelocity = -999;
    public float overrideYVelocity = -999;
    private float gravity = 0.004f;
    private bool hasPlopped = false;
    public bool globallyAttracted = false;
    public float playerGravity = 0f;

    private void Start()
    {
        // An index of 0 is "no item". Destroy() is deferred to the end of the frame,
        // so the initialization below still runs once; kept to match existing behavior.
        if (itemIndex == 0)
            Destroy(gameObject);

        inventory = GameObject.FindGameObjectWithTag("Player").GetComponent<Inventory>();
        playerStats = GameObject.FindGameObjectWithTag("Player").GetComponent<PlayerMovement>();
        player = GameObject.FindGameObjectWithTag("Player").transform;
        spriteRenderer = GetComponent<SpriteRenderer>();

        // Random sideways kick plus a fixed upward kick for the plop arc.
        xVelocity = Random.Range(-maxXVelocity, maxXVelocity);
        yVelocity = maxYVelocity;

        // Not collectable until the plop animation has landed.
        boxCollider = GetComponent<BoxCollider2D>();
        boxCollider.enabled = false;
        StartCoroutine(Plop());
    }

    private void FixedUpdate()
    {
        // Move toward the player when close enough (after landing) or when globally attracted.
        // Parentheses added for clarity; "&&" already bound tighter than "||".
        float sqrDistanceFromPlayer = (transform.position - player.position).sqrMagnitude;
        if ((sqrDistanceFromPlayer < playerStats.sqrItemAttractionRadius && hasPlopped) || globallyAttracted)
        {
            Vector3 newPosition = Vector3.MoveTowards(transform.position, player.position, itemSpeed * Time.deltaTime);
            transform.position = newPosition;
        }

        itemSpeed += playerGravity;
    }

    /// <summary>
    /// Animates the spawn arc: the item rises, falls back under gravity, and lands at
    /// a slightly randomized height, after which it becomes collectable.
    /// </summary>
    public IEnumerator Plop()
    {
        int originalSortingOrder = spriteRenderer.sortingOrder;
        Vector3 originalPosition = transform.position;
        float itemHeight = 0;
        float xDistance = 0;
        float finalYDistance = Random.Range(-0.1f, 0f);
        bool ploppedHeightReached = false;

        // Animate until the item has peaked (vertical velocity <= 0) and then fallen
        // back down to its landing height.
        while (itemHeight >= finalYDistance || !ploppedHeightReached)
        {
            transform.position = new Vector3(originalPosition.x + xDistance, originalPosition.y + itemHeight, originalPosition.z);
            xDistance += xVelocity;
            itemHeight += yVelocity;
            yVelocity -= gravity;
            ploppedHeightReached = yVelocity <= 0; // simplified from "? true : false"
            yield return null;
        }

        hasPlopped = true;
        boxCollider.enabled = true;
        spriteRenderer.sortingLayerName = "Wall";
        spriteRenderer.sortingOrder = originalSortingOrder;
    }

    private void OnTriggerEnter2D(Collider2D other)
    {
        // On contact with the player: reset attraction speed, then consume the item
        // if it has landed and the inventory has room.
        if (other.CompareTag("Player"))
        {
            playerGravity = 0f;
            itemSpeed = 1f;
            if (hasPlopped && inventory.AddItem(itemIndex))
            {
                Destroy(gameObject);
                return;
            }
        }
    }

    // Destroys the item (lost in water). NOTE(review): no caller in this file —
    // presumably invoked by name (SendMessage/animation event); confirm before removing.
    private void FallIntoPond()
    {
        Destroy(gameObject);
    }
}
| 27.942623 | 130 | 0.646524 | [
"MIT"
] | hwelsters/farm-game | Assets/Scripts/Pickup.cs | 3,411 | C# |
using System;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
namespace jQueryDatatableServerSideNetCore22.Data.Migrations
{
    // Auto-generated EF Core migration that creates the TestRegisters table.
    // NOTE(review): generated code — keep in sync with the model snapshot;
    // only comments were added here, the executable code is unchanged.
    public partial class Init : Migration
    {
        // Applies the migration: creates TestRegisters with an identity primary key.
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.CreateTable(
                name: "TestRegisters",
                columns: table => new
                {
                    // SQL Server identity column used as the primary key.
                    Id = table.Column<int>(nullable: false)
                        .Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
                    Name = table.Column<string>(nullable: true),
                    FirstSurname = table.Column<string>(nullable: true),
                    SecondSurname = table.Column<string>(nullable: true),
                    Street = table.Column<string>(nullable: true),
                    Phone = table.Column<string>(nullable: true),
                    ZipCode = table.Column<string>(nullable: true),
                    Country = table.Column<string>(nullable: true),
                    Notes = table.Column<string>(nullable: true),
                    CreationDate = table.Column<DateTime>(nullable: false)
                },
                constraints: table =>
                {
                    table.PrimaryKey("PK_TestRegisters", x => x.Id);
                });
        }
        // Reverts the migration: drops the TestRegisters table.
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropTable(
                name: "TestRegisters");
        }
    }
}
| 39.95 | 122 | 0.562578 | [
"MIT"
] | AlanLaiTw/jQuery-datatable-server-side-net-core | src/jQueryDatatableServerSideNetCore22/Data/Migrations/20190407193327_Init.cs | 1,600 | C# |
////////////////////////////////////////////////////////////////////////////////
//NUnit tests for "EF Core Provider for LCPI OLE DB"
// IBProvider and Contributors. 16.05.2021.
using System;
using System.Data;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using Microsoft.EntityFrameworkCore;
using NUnit.Framework;
using xdb=lcpi.data.oledb;
namespace EFCore_LcpiOleDb_Tests.General.Work.DBMS.Firebird.V03_0_0.D1.Query.Operators.SET_001.NotEqual.Complete2__objs.Int16.NullableInt32{
////////////////////////////////////////////////////////////////////////////////
using T_DATA1 =System.Int16;
using T_DATA2 =System.Nullable<System.Int32>;
////////////////////////////////////////////////////////////////////////////////
//class TestSet_504__param__01__VV
//Tests the C# "!=" operator between an Int16 value and a Nullable<Int32> value when
//both operands are boxed to object. Because the operands are typed as object, the
//comparison is evaluated on the client and sent to the server as a single boolean
//parameter (named "__Exec_V_V_0", or "__Exec_V_0" for the negated form) rather than
//as a SQL expression; each test asserts that generated SQL text exactly.
public static class TestSet_504__param__01__VV
{
 //Name of the single-row helper table queried by every test.
 private const string c_NameOf__TABLE ="DUAL";
 //Minimal DbContext mapping the DUAL helper table to a one-column entity.
 private sealed class MyContext:TestBaseDbContext
 {
  [Table(c_NameOf__TABLE)]
  public sealed class TEST_RECORD
  {
   [Key]
   [Column("ID")]
   public System.Int32? TEST_ID { get; set; }
  };//class TEST_RECORD
  //----------------------------------------------------------------------
  public DbSet<TEST_RECORD> testTable { get; set; }
  //----------------------------------------------------------------------
  public MyContext(xdb.OleDbTransaction tr)
   :base(tr)
  {
  }//MyContext
 };//class MyContext
 //-----------------------------------------------------------------------
 //3 != 4 -> predicate is true: the single DUAL row must be returned.
 [Test]
 public static void Test_001__less()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=3;
     T_DATA2 vv2=4;
     var recs=db.testTable.Where(r => ((object)vv1) /*OP{*/ != /*}OP*/ ((object)vv2));
     int nRecs=0;
     foreach(var r in recs)
     {
      Assert.AreEqual
       (0,
        nRecs);
      ++nRecs;
      Assert.IsTrue
       (r.TEST_ID.HasValue);
      Assert.AreEqual
       (1,
        r.TEST_ID.Value);
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_V_0"));
     Assert.AreEqual
      (1,
       nRecs);
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_001__less
 //-----------------------------------------------------------------------
 //4 != 4 -> predicate is false: no rows may be returned.
 [Test]
 public static void Test_002__equal()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=4;
     T_DATA2 vv2=4;
     var recs=db.testTable.Where(r => ((object)vv1) /*OP{*/ != /*}OP*/ ((object)vv2));
     foreach(var r in recs)
     {
      TestServices.ThrowSelectedRow();
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_V_0"));
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_002__equal
 //-----------------------------------------------------------------------
 //4 != 3 -> predicate is true: the row must be returned.
 [Test]
 public static void Test_003__greater()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=4;
     T_DATA2 vv2=3;
     var recs=db.testTable.Where(r => ((object)vv1) /*OP{*/ != /*}OP*/ ((object)vv2));
     int nRecs=0;
     foreach(var r in recs)
     {
      Assert.AreEqual
       (0,
        nRecs);
      ++nRecs;
      Assert.IsTrue
       (r.TEST_ID.HasValue);
      Assert.AreEqual
       (1,
        r.TEST_ID.Value);
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_V_0"));
     Assert.AreEqual
      (1,
       nRecs);
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_003__greater
 //-----------------------------------------------------------------------
 //!(3 != 4) -> predicate is false: no rows may be returned (negated form uses "__Exec_V_0").
 [Test]
 public static void Test_101__less()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=3;
     T_DATA2 vv2=4;
     var recs=db.testTable.Where(r => !(((object)vv1) /*OP{*/ != /*}OP*/ ((object)vv2)));
     foreach(var r in recs)
     {
      TestServices.ThrowSelectedRow();
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_0"));
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_101__less
 //-----------------------------------------------------------------------
 //!(4 != 4) -> predicate is true: the row must be returned.
 [Test]
 public static void Test_102__equal()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=4;
     T_DATA2 vv2=4;
     var recs=db.testTable.Where(r => !(((object)vv1) /*OP{*/ != /*}OP*/ ((object)vv2)));
     int nRecs=0;
     foreach(var r in recs)
     {
      Assert.AreEqual
       (0,
        nRecs);
      ++nRecs;
      Assert.IsTrue
       (r.TEST_ID.HasValue);
      Assert.AreEqual
       (1,
        r.TEST_ID.Value);
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_0"));
     Assert.AreEqual
      (1,
       nRecs);
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_102__equal
 //-----------------------------------------------------------------------
 //!(4 != 3) -> predicate is false: no rows may be returned.
 [Test]
 public static void Test_103__greater()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=4;
     T_DATA2 vv2=3;
     var recs=db.testTable.Where(r => !(((object)vv1) /*OP{*/ != /*}OP*/ ((object)vv2)));
     foreach(var r in recs)
     {
      TestServices.ThrowSelectedRow();
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_0"));
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_103__greater
 //-----------------------------------------------------------------------
 //null != 4 (boxed object comparison) -> true: the row must be returned.
 [Test]
 public static void Test_ZA01NV()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     object vv1__null_obj=null;
     T_DATA2 vv2=4;
     var recs=db.testTable.Where(r => ((object)(T_DATA1)vv1__null_obj) /*OP{*/ != /*}OP*/ ((object)vv2));
     int nRecs=0;
     foreach(var r in recs)
     {
      Assert.AreEqual
       (0,
        nRecs);
      ++nRecs;
      Assert.IsTrue
       (r.TEST_ID.HasValue);
      Assert.AreEqual
       (1,
        r.TEST_ID.Value);
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_V_0"));
     Assert.AreEqual
      (1,
       nRecs);
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_ZA01NV
 //-----------------------------------------------------------------------
 //3 != null -> true: the row must be returned.
 [Test]
 public static void Test_ZA02VN()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=3;
     object vv2__null_obj=null;
     var recs=db.testTable.Where(r => ((object)vv1) /*OP{*/ != /*}OP*/ ((object)(T_DATA2)vv2__null_obj));
     int nRecs=0;
     foreach(var r in recs)
     {
      Assert.AreEqual
       (0,
        nRecs);
      ++nRecs;
      Assert.IsTrue
       (r.TEST_ID.HasValue);
      Assert.AreEqual
       (1,
        r.TEST_ID.Value);
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_V_0"));
     Assert.AreEqual
      (1,
       nRecs);
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_ZA02VN
 //-----------------------------------------------------------------------
 //null != null -> false: no rows may be returned.
 [Test]
 public static void Test_ZA03NN()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     object vv1__null_obj=null;
     object vv2__null_obj=null;
     var recs=db.testTable.Where(r => ((object)(T_DATA1)vv1__null_obj) /*OP{*/ != /*}OP*/ ((object)(T_DATA2)vv2__null_obj));
     foreach(var r in recs)
     {
      TestServices.ThrowSelectedRow();
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_V_0"));
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_ZA03NN
 //-----------------------------------------------------------------------
 //!(null != 4) -> false: no rows may be returned.
 [Test]
 public static void Test_ZB01NV()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     object vv1__null_obj=null;
     T_DATA2 vv2=4;
     var recs=db.testTable.Where(r => !(((object)(T_DATA1)vv1__null_obj) /*OP{*/ != /*}OP*/ ((object)vv2)));
     foreach(var r in recs)
     {
      TestServices.ThrowSelectedRow();
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_0"));
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_ZB01NV
 //-----------------------------------------------------------------------
 //!(3 != null) -> false: no rows may be returned.
 [Test]
 public static void Test_ZB02VN()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     T_DATA1 vv1=3;
     object vv2__null_obj=null;
     var recs=db.testTable.Where(r => !(((object)vv1) /*OP{*/ != /*}OP*/ ((object)(T_DATA2)vv2__null_obj)));
     foreach(var r in recs)
     {
      TestServices.ThrowSelectedRow();
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_0"));
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_ZB02VN
 //-----------------------------------------------------------------------
 //!(null != null) -> true: the row must be returned.
 [Test]
 public static void Test_ZB03NN()
 {
  using(var cn=LocalCnHelper.CreateCn())
  {
   cn.Open();
   using(var tr=cn.BeginTransaction())
   {
    //insert new record in external transaction
    using(var db=new MyContext(tr))
    {
     object vv1__null_obj=null;
     object vv2__null_obj=null;
     var recs=db.testTable.Where(r => !(((object)(T_DATA1)vv1__null_obj) /*OP{*/ != /*}OP*/ ((object)(T_DATA2)vv2__null_obj)));
     int nRecs=0;
     foreach(var r in recs)
     {
      Assert.AreEqual
       (0,
        nRecs);
      ++nRecs;
      Assert.IsTrue
       (r.TEST_ID.HasValue);
      Assert.AreEqual
       (1,
        r.TEST_ID.Value);
     }//foreach r
     db.CheckTextOfLastExecutedCommand
      (new TestSqlTemplate()
        .T("SELECT ").N("d","ID").EOL()
        .T("FROM ").N(c_NameOf__TABLE).T(" AS ").N("d").EOL()
        .T("WHERE ").P_BOOL("__Exec_V_0"));
     Assert.AreEqual
      (1,
       nRecs);
    }//using db
    tr.Commit();
   }//using tr
  }//using cn
 }//Test_ZB03NN
};//class TestSet_504__param__01__VV
////////////////////////////////////////////////////////////////////////////////
}//namespace EFCore_LcpiOleDb_Tests.General.Work.DBMS.Firebird.V03_0_0.D1.Query.Operators.SET_001.NotEqual.Complete2__objs.Int16.NullableInt32
| 23.188811 | 142 | 0.523748 | [
"MIT"
] | ibprovider/Lcpi.EFCore.LcpiOleDb | Tests/General/Source/Work/DBMS/Firebird/V03_0_0/D1/Query/Operators/SET_001/NotEqual/Complete2__objs/Int16/NullableInt32/TestSet_504__param__01__VV.cs | 13,266 | C# |
// *****************************************************************************
//
// © Component Factory Pty Ltd 2017. All rights reserved.
// The software and associated documentation supplied hereunder are the
// proprietary information of Component Factory Pty Ltd, 13 Swallows Close,
// Mornington, Vic 3931, Australia and are supplied subject to licence terms.
//
// Version 4.5.0.0 www.ComponentFactory.com
// *****************************************************************************
using System;
using System.Text;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Diagnostics;
namespace ComponentFactory.Krypton.Toolkit
{
    /// <summary>
    /// View element that can draw a date time picker button
    /// (drop-down, numeric-up or numeric-down glyph).
    /// </summary>
    public class ViewDrawDateTimeButton : ViewDrawButton
    {
        #region Enums
        /// <summary>
        /// Specifies the possible glyphs the button can draw.
        /// </summary>
        public enum DrawDateTimeGlyph
        {
            /// <summary>
            /// Specifies the drop down button glyph.
            /// </summary>
            DropDownButton,
            /// <summary>
            /// Specifies the up button glyph.
            /// </summary>
            UpButton,
            /// <summary>
            /// Specifies the down button glyph.
            /// </summary>
            DownButton
        }
        #endregion
        #region Instance Fields
        private DrawDateTimeGlyph _glyph;
        private KryptonDateTimePicker _dateTimePicker;
        private ButtonController _controller;
        #endregion
        #region Events
        /// <summary>
        /// Occurs when the button is clicked.
        /// </summary>
        public event EventHandler Click;
        /// <summary>
        /// Occurs when the mouse is used to left select the target.
        /// </summary>
        public event MouseEventHandler MouseSelect;
        #endregion
        #region Identity
        /// <summary>
        /// Initialize a new instance of the ViewDrawButton class.
        /// </summary>
        /// <param name="dateTimePicker">Owning control.</param>
        /// <param name="paletteState">Palette source for states.</param>
        /// <param name="paletteMetric">Palette source for metric values.</param>
        /// <param name="buttonValues">Source for content values.</param>
        /// <param name="glyph">Glyph to be drawn.</param>
        /// <param name="needPaintHandler">Delegate for requests repainting.</param>
        /// <param name="repeat">Should button repeat.</param>
        public ViewDrawDateTimeButton(KryptonDateTimePicker dateTimePicker,
                                      IPaletteTriple paletteState,
                                      IPaletteMetric paletteMetric,
                                      IContentValues buttonValues,
                                      DrawDateTimeGlyph glyph,
                                      NeedPaintHandler needPaintHandler,
                                      bool repeat)
            : base(paletteState, paletteState, paletteState, paletteState,
                   paletteMetric, buttonValues, VisualOrientation.Top, false)
        {
            _dateTimePicker = dateTimePicker;
            _glyph = glyph;
            // Assign a controller to handle visual interaction
            // (a repeating button fires Click continuously while held and never becomes fixed).
            _controller = new ButtonController(this, needPaintHandler);
            _controller.BecomesFixed = !repeat;
            _controller.Click += new MouseEventHandler(OnButtonClick);
            _controller.MouseSelect += new MouseEventHandler(OnButtonMouseSelect);
            _controller.Repeat = repeat;
            _controller.ClickOnDown = true;
            MouseController = _controller;
        }
        /// <summary>
        /// Obtains the String representation of this instance.
        /// </summary>
        /// <returns>User readable name of the instance.</returns>
        public override string ToString()
        {
            // Return the class name and instance identifier
            return "ViewDrawDateTimeButton:" + Id;
        }
        #endregion
        #region RemoveFixed
        /// <summary>
        /// Remove the fixed appearance of the button.
        /// </summary>
        public void RemoveFixed()
        {
            _controller.RemoveFixed();
        }
        #endregion
        #region Layout
        /// <summary>
        /// Discover the preferred size of the element.
        /// </summary>
        /// <param name="context">Layout context.</param>
        public override Size GetPreferredSize(ViewLayoutContext context)
        {
            // We want to be as wide as drop down buttons on standard controls
            return new Size(SystemInformation.VerticalScrollBarWidth - 2, 0);
        }
        /// <summary>
        /// Perform a layout of the elements.
        /// </summary>
        /// <param name="context">Layout context.</param>
        public override void Layout(ViewLayoutContext context)
        {
            Debug.Assert(context != null);
            // Validate incoming reference
            if (context == null) throw new ArgumentNullException("context");
            // Layout the button drawing elements using a reduced size
            // (each glyph trims the rectangle differently so adjacent buttons abut cleanly).
            Rectangle beforeRect = context.DisplayRectangle;
            switch (_glyph)
            {
                case DrawDateTimeGlyph.DropDownButton:
                    context.DisplayRectangle = new Rectangle(beforeRect.X, beforeRect.Y + 1, beforeRect.Width, beforeRect.Height - 2);
                    break;
                case DrawDateTimeGlyph.UpButton:
                    context.DisplayRectangle = new Rectangle(beforeRect.X, beforeRect.Y + 1, beforeRect.Width, beforeRect.Height - 1);
                    break;
                case DrawDateTimeGlyph.DownButton:
                    context.DisplayRectangle = new Rectangle(beforeRect.X, beforeRect.Y, beforeRect.Width, beforeRect.Height - 1);
                    break;
            }
            base.Layout(context);
            // Restore the original size and use that as the actual client rectangle
            context.DisplayRectangle = beforeRect;
            ClientRectangle = beforeRect;
        }
        #endregion
        #region Paint
        /// <summary>
        /// Perform rendering after child elements are rendered.
        /// </summary>
        /// <param name="context">Rendering context.</param>
        public override void RenderAfter(RenderContext context)
        {
            // Draw the configured glyph on top of the button background.
            switch (_glyph)
            {
                case DrawDateTimeGlyph.DropDownButton:
                    context.Renderer.RenderGlyph.DrawInputControlDropDownGlyph(context, ClientRectangle, CurrentPalette.PaletteContent, State);
                    break;
                case DrawDateTimeGlyph.UpButton:
                    context.Renderer.RenderGlyph.DrawInputControlNumericUpGlyph(context, ClientRectangle, CurrentPalette.PaletteContent, State);
                    break;
                case DrawDateTimeGlyph.DownButton:
                    context.Renderer.RenderGlyph.DrawInputControlNumericDownGlyph(context, ClientRectangle, CurrentPalette.PaletteContent, State);
                    break;
            }
        }
        #endregion
        #region Protected
        /// <summary>
        /// Check that the palette and state are correct.
        /// </summary>
        /// <param name="context">Reference to the view context.</param>
        protected override void CheckPaletteState(ViewContext context)
        {
            PaletteState state = ElementState;
            // If the drop down calendar is showing then always draw button as pressed
            if (_dateTimePicker.IsDropped)
                state = PaletteState.Pressed;
            else
            {
                // If the button is in a normal state (not being tracked or pressed)
                if ((ElementState == PaletteState.Normal) ||
                    (ElementState == PaletteState.CheckedNormal))
                {
                    // If the control is active then use the checked normal appearance, otherwise not active and so use the normal appearance
                    if (_dateTimePicker.IsActive || (_dateTimePicker.IsFixedActive && (_dateTimePicker.InputControlStyle == InputControlStyle.Standalone)))
                        state = PaletteState.CheckedNormal;
                    else
                        state = PaletteState.Normal;
                }
            }
            // Push the resolved state down to all child elements so they paint consistently.
            ElementState = state;
            foreach (ViewBase child in this)
                child.ElementState = state;
        }
        /// <summary>
        /// Raises the Click event.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Event arguments associated with the event.</param>
        protected void OnButtonClick(object sender, MouseEventArgs e)
        {
            if (Click != null)
                Click(this, e);
        }
        /// <summary>
        /// Raises the MouseSelect event.
        /// </summary>
        /// <param name="sender">Source of the event.</param>
        /// <param name="e">Event arguments associated with the event.</param>
        protected void OnButtonMouseSelect(object sender, MouseEventArgs e)
        {
            if (MouseSelect != null)
                MouseSelect(this, e);
        }
        #endregion
    }
}
| 38.655738 | 155 | 0.573261 | [
"BSD-3-Clause"
] | ALMMa/Krypton | Source/Krypton Components/ComponentFactory.Krypton.Toolkit/View Draw/ViewDrawDateTimeButton.cs | 9,435 | C# |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace RunWebserverHere
{
public partial class InstallUninstallForm : Form
{
public InstallUninstallForm()
{
InitializeComponent();
}
private void InstallButton_Click(object sender, EventArgs e)
{
try
{
var key = Microsoft.Win32.Registry.ClassesRoot.CreateSubKey("Directory\\shell\\runwebserverhere", Microsoft.Win32.RegistryKeyPermissionCheck.ReadWriteSubTree);
key.SetValue(null, "Run Webserver Here");
var commandKey = key.CreateSubKey("command");
commandKey.SetValue(null, string.Format("\"{0}\" \"%1\"", Assembly.GetExecutingAssembly().Location));
var key2 = Microsoft.Win32.Registry.ClassesRoot.CreateSubKey("Directory\\shell\\runwebserverhere_cfg", Microsoft.Win32.RegistryKeyPermissionCheck.ReadWriteSubTree);
key2.SetValue(null, "Configure Webserver Here");
key2.SetValue("Extended", "");
var commandKey2 = key2.CreateSubKey("command");
commandKey2.SetValue(null, string.Format("\"{0}\" \"%1\" --configure", Assembly.GetExecutingAssembly().Location));
MessageBox.Show("Installed successfully!", "RunWebserverHere", MessageBoxButtons.OK, MessageBoxIcon.Information);
this.DialogResult = System.Windows.Forms.DialogResult.OK;
this.Close();
}
catch
{
MessageBox.Show("Install failed! Try running as admin.", "RunWebserverHere", MessageBoxButtons.OK, MessageBoxIcon.Error);
this.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.Close();
}
}
}
}
| 40.693878 | 180 | 0.638917 | [
"MIT"
] | wgraham17/run-webserver-here | RunWebserverHere/RunWebserverHere/InstallUninstallForm.cs | 1,996 | C# |
using GVFS.Common.NamedPipes;
using GVFS.Common.Tracing;
using System;
using System.Diagnostics;
using System.Threading;
namespace GVFS.Common
{
public partial class GVFSLock
{
        // Serializes all acquire/release/query operations on this lock.
        private readonly object acquisitionLock = new object();
        private readonly ITracer tracer;
        // Tracks the current holder: GVFS itself or an external (git) process.
        private readonly LockHolder currentLockHolder = new LockHolder();
        /// <summary>
        /// Creates a lock that reports acquisition/release activity through <paramref name="tracer"/>.
        /// </summary>
        public GVFSLock(ITracer tracer)
        {
            this.tracer = tracer;
            this.Stats = new ActiveGitCommandStats();
        }
        /// <summary>
        /// Statistics for the active external git command. Reset each time an
        /// external requestor acquires the lock.
        /// </summary>
        public ActiveGitCommandStats Stats
        {
            get;
            private set;
        }
        /// <summary>
        /// Allows external callers (non-GVFS) to acquire the lock.
        /// </summary>
        /// <param name="requestor">The data for the external acquisition request.</param>
        /// <param name="existingExternalHolder">The current holder of the lock if the acquisition fails.</param>
        /// <returns>True if the lock was acquired, false otherwise.</returns>
        public bool TryAcquireLockForExternalRequestor(
            NamedPipeMessages.LockData requestor,
            out NamedPipeMessages.LockData existingExternalHolder)
        {
            EventMetadata metadata = new EventMetadata();
            // Successful acquisitions are logged at Informational; denials stay Verbose.
            EventLevel eventLevel = EventLevel.Verbose;
            metadata.Add("LockRequest", requestor.ToString());
            metadata.Add("IsElevated", requestor.IsElevated);
            existingExternalHolder = null;
            try
            {
                lock (this.acquisitionLock)
                {
                    // GVFS itself holds the lock; external requestors are denied.
                    if (this.currentLockHolder.IsGVFS)
                    {
                        metadata.Add("CurrentLockHolder", "GVFS");
                        metadata.Add("Result", "Denied");
                        return false;
                    }
                    // Another external process already holds the lock.
                    existingExternalHolder = this.GetExternalHolder();
                    if (existingExternalHolder != null)
                    {
                        metadata.Add("CurrentLockHolder", existingExternalHolder.ToString());
                        metadata.Add("Result", "Denied");
                        return false;
                    }
                    metadata.Add("Result", "Accepted");
                    eventLevel = EventLevel.Informational;
                    this.currentLockHolder.AcquireForExternalRequestor(requestor);
                    // Start fresh stats for the new holder's git command.
                    this.Stats = new ActiveGitCommandStats();
                    return true;
                }
            }
            finally
            {
                this.tracer.RelatedEvent(eventLevel, "TryAcquireLockExternal", metadata);
            }
        }
        /// <summary>
        /// Allow GVFS to acquire the lock.
        /// </summary>
        /// <returns>True if GVFS was able to acquire the lock or if it already held it. False otherwise.</returns>
        public bool TryAcquireLockForGVFS()
        {
            EventMetadata metadata = new EventMetadata();
            try
            {
                lock (this.acquisitionLock)
                {
                    // Re-entrant: GVFS already holds the lock.
                    if (this.currentLockHolder.IsGVFS)
                    {
                        return true;
                    }
                    // An external process holds the lock; GVFS must wait.
                    NamedPipeMessages.LockData existingExternalHolder = this.GetExternalHolder();
                    if (existingExternalHolder != null)
                    {
                        metadata.Add("CurrentLockHolder", existingExternalHolder.ToString());
                        metadata.Add("Result", "Denied");
                        return false;
                    }
                    this.currentLockHolder.AcquireForGVFS();
                    metadata.Add("Result", "Accepted");
                    return true;
                }
            }
            finally
            {
                this.tracer.RelatedEvent(EventLevel.Verbose, "TryAcquireLockInternal", metadata);
            }
        }
public void ReleaseLockHeldByGVFS()
{
lock (this.acquisitionLock)
{
if (!this.currentLockHolder.IsGVFS)
{
throw new InvalidOperationException("Cannot release lock that is not held by GVFS");
}
this.tracer.RelatedEvent(EventLevel.Verbose, nameof(this.ReleaseLockHeldByGVFS), new EventMetadata());
this.currentLockHolder.Release();
}
}
public bool ReleaseLockHeldByExternalProcess(int pid)
{
return this.ReleaseExternalLock(pid, nameof(this.ReleaseLockHeldByExternalProcess));
}
public NamedPipeMessages.LockData GetExternalHolder()
{
NamedPipeMessages.LockData externalHolder;
this.IsLockAvailable(checkExternalHolderOnly: true, existingExternalHolder: out externalHolder);
return externalHolder;
}
public bool IsLockAvailableForExternalRequestor(out NamedPipeMessages.LockData existingExternalHolder)
{
return this.IsLockAvailable(checkExternalHolderOnly: false, existingExternalHolder: out existingExternalHolder);
}
public string GetLockedGitCommand()
{
// In this code path, we don't care if the process terminated without releasing the lock. The calling code
// is asking us about this lock so that it can determine if git was the cause of certain IO events. Even
// if the git process has terminated, the answer to that question does not change.
NamedPipeMessages.LockData currentHolder = this.currentLockHolder.GetExternalHolder();
if (currentHolder != null)
{
return currentHolder.ParsedCommand;
}
return null;
}
public string GetStatus()
{
lock (this.acquisitionLock)
{
if (this.currentLockHolder.IsGVFS)
{
return "Held by GVFS.";
}
NamedPipeMessages.LockData externalHolder = this.GetExternalHolder();
if (externalHolder != null)
{
return string.Format("Held by {0} (PID:{1})", externalHolder.ParsedCommand, externalHolder.PID);
}
}
return "Free";
}
private bool IsLockAvailable(bool checkExternalHolderOnly, out NamedPipeMessages.LockData existingExternalHolder)
{
lock (this.acquisitionLock)
{
if (!checkExternalHolderOnly &&
this.currentLockHolder.IsGVFS)
{
existingExternalHolder = null;
return false;
}
bool externalHolderTerminatedWithoutReleasingLock;
existingExternalHolder = this.currentLockHolder.GetExternalHolder(
out externalHolderTerminatedWithoutReleasingLock);
if (externalHolderTerminatedWithoutReleasingLock)
{
this.ReleaseLockForTerminatedProcess(existingExternalHolder.PID);
existingExternalHolder = null;
}
return existingExternalHolder == null;
}
}
private bool ReleaseExternalLock(int pid, string eventName)
{
lock (this.acquisitionLock)
{
EventMetadata metadata = new EventMetadata();
try
{
if (this.currentLockHolder.IsGVFS)
{
metadata.Add("IsLockedByGVFS", "true");
return false;
}
// We don't care if the process has already terminated. We're just trying to record the info for the last holder.
NamedPipeMessages.LockData previousExternalHolder = this.currentLockHolder.GetExternalHolder();
if (previousExternalHolder == null)
{
metadata.Add("Result", "Failed (no current holder, requested PID=" + pid + ")");
return false;
}
metadata.Add("CurrentLockHolder", previousExternalHolder.ToString());
metadata.Add("IsElevated", previousExternalHolder.IsElevated);
metadata.Add(nameof(RepoMetadata.Instance.EnlistmentId), RepoMetadata.Instance.EnlistmentId);
if (previousExternalHolder.PID != pid)
{
metadata.Add("pid", pid);
metadata.Add("Result", "Failed (wrong PID)");
return false;
}
this.currentLockHolder.Release();
metadata.Add("Result", "Released");
this.Stats.AddStatsToTelemetry(metadata);
return true;
}
finally
{
this.tracer.RelatedEvent(EventLevel.Informational, eventName, metadata, Keywords.Telemetry);
}
}
}
private void ReleaseLockForTerminatedProcess(int pid)
{
this.ReleaseExternalLock(pid, "ExternalLockHolderExited");
}
// The lock release event is a convenient place to record stats about things that happened while a git command was running,
// such as duration/count of object downloads during a git command, cache hits during a git command, etc.
public class ActiveGitCommandStats
{
private Stopwatch lockAcquiredTime;
private long lockHeldExternallyTimeMs;
private long placeholderUpdateTimeMs;
private long parseGitIndexTimeMs;
private int numBlobs;
private long blobDownloadTimeMs;
private int numCommitsAndTrees;
private long commitAndTreeDownloadTimeMs;
private int numSizeQueries;
private long sizeQueryTimeMs;
public ActiveGitCommandStats()
{
this.lockAcquiredTime = Stopwatch.StartNew();
}
public void RecordReleaseExternalLockRequested()
{
this.lockHeldExternallyTimeMs = this.lockAcquiredTime.ElapsedMilliseconds;
}
public void RecordUpdatePlaceholders(long durationMs)
{
this.placeholderUpdateTimeMs = durationMs;
}
public void RecordParseGitIndex(long durationMs)
{
this.parseGitIndexTimeMs = durationMs;
}
public void RecordObjectDownload(bool isBlob, long downloadTimeMs)
{
if (isBlob)
{
Interlocked.Increment(ref this.numBlobs);
Interlocked.Add(ref this.blobDownloadTimeMs, downloadTimeMs);
}
else
{
Interlocked.Increment(ref this.numCommitsAndTrees);
Interlocked.Add(ref this.commitAndTreeDownloadTimeMs, downloadTimeMs);
}
}
public void RecordSizeQuery(long queryTimeMs)
{
Interlocked.Increment(ref this.numSizeQueries);
Interlocked.Add(ref this.sizeQueryTimeMs, queryTimeMs);
}
public void AddStatsToTelemetry(EventMetadata metadata)
{
metadata.Add("DurationMS", this.lockAcquiredTime.ElapsedMilliseconds);
metadata.Add("LockHeldExternallyMS", this.lockHeldExternallyTimeMs);
metadata.Add("ParseGitIndexMS", this.parseGitIndexTimeMs);
metadata.Add("UpdatePlaceholdersMS", this.placeholderUpdateTimeMs);
metadata.Add("BlobsDownloaded", this.numBlobs);
metadata.Add("BlobDownloadTimeMS", this.blobDownloadTimeMs);
metadata.Add("CommitsAndTreesDownloaded", this.numCommitsAndTrees);
metadata.Add("CommitsAndTreesDownloadTimeMS", this.commitAndTreeDownloadTimeMs);
metadata.Add("SizeQueries", this.numSizeQueries);
metadata.Add("SizeQueryTimeMS", this.sizeQueryTimeMs);
}
}
/// <summary>
/// This class manages the state of which process currently owns the GVFS lock. This code is complicated because
/// the lock can be held by us or by an external process, and because the external process that holds the lock
/// can terminate without releasing the lock. If that happens, we implicitly release the lock the next time we
/// check to see who is holding it.
///
/// The goal of this class is to make it impossible for the rest of GVFSLock to read the external holder without being
/// aware of the fact that it could have terminated.
///
/// This class assumes that the caller is handling all synchronization.
/// </summary>
private class LockHolder
{
private NamedPipeMessages.LockData externalLockHolder;
public bool IsFree
{
get { return !this.IsGVFS && this.externalLockHolder == null; }
}
public bool IsGVFS
{
get; private set;
}
public void AcquireForGVFS()
{
if (this.externalLockHolder != null)
{
throw new InvalidOperationException("Cannot acquire for GVFS because there is an external holder");
}
this.IsGVFS = true;
}
public void AcquireForExternalRequestor(NamedPipeMessages.LockData externalLockHolder)
{
if (this.IsGVFS ||
this.externalLockHolder != null)
{
throw new InvalidOperationException("Cannot acquire a lock that is already held");
}
this.externalLockHolder = externalLockHolder;
}
public void Release()
{
this.IsGVFS = false;
this.externalLockHolder = null;
}
public NamedPipeMessages.LockData GetExternalHolder()
{
return this.externalLockHolder;
}
public NamedPipeMessages.LockData GetExternalHolder(out bool externalHolderTerminatedWithoutReleasingLock)
{
externalHolderTerminatedWithoutReleasingLock = false;
if (this.externalLockHolder != null)
{
int pid = this.externalLockHolder.PID;
externalHolderTerminatedWithoutReleasingLock = !GVFSPlatform.Instance.IsProcessActive(pid);
}
return this.externalLockHolder;
}
}
}
}
| 38.027027 | 134 | 0.537055 | [
"MIT"
] | yijunyu/VFSForGit | GVFS/GVFS.Common/GVFSLock.cs | 15,477 | C# |
using RRModels;
using System;
using System.ComponentModel.DataAnnotations;
namespace RRWebUI.Models
{
public class ReviewVM
{
public ReviewVM()
{
}
public ReviewVM(int restaurantId)
{
RestauranId = restaurantId;
}
public ReviewVM(Review review)
{
RestauranId = review.RestaurantId;
Rating = review.Rating;
Description = review.Description;
}
public int RestauranId { get; set; }
[Required]
[Range(0, 100)]
public int Rating { get; set; }
[Required]
public string Description { get; set; }
}
} | 19.970588 | 47 | 0.549337 | [
"MIT"
] | 210503-Reston-NET/Cooks-and-Crooks-Hackathon | RRWebUI/Models/ReviewVM.cs | 681 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
using Datos;
using Metodos;
namespace HumanResourcesSM.Windows
{
/// <summary>
/// Interaction logic for DepartamentoDG.xaml
/// </summary>
    /// <summary>
    /// Interaction logic for DepartamentoDG.xaml: a page listing departments in a
    /// DataGrid with search, create, edit, view, and delete operations.
    /// </summary>
    public partial class DepartamentoDG : Page
    {
        // Data-access methods for departments.
        MDepartamento Metodos = new MDepartamento();
        public DepartamentoDG()
        {
            InitializeComponent();
        }
        // Reloads the grid with the departments matching the search text.
        public void Refresh(string search)
        {
            List<DDepartamento> items = Metodos.Mostrar(search);
            dgOperaciones.ItemsSource = items;
        }
        // Initial population of the grid when the page loads.
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            Refresh(txtBuscar.Text);
        }
        // "New" button: opens the department form and refreshes afterwards.
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            DepartamentoFrm frmTrab = new DepartamentoFrm();
            bool Resp = frmTrab.ShowDialog() ?? false;
            Refresh(txtBuscar.Text);
        }
        // "Edit" button: CommandParameter carries the department id of the row.
        private void Button_Click_1(object sender, RoutedEventArgs e)
        {
            int id = (int)((Button)sender).CommandParameter;
            var response = Metodos.Encontrar(id);
            DepartamentoFrm frm = new DepartamentoFrm();
            frm.Type = TypeForm.Update;
            frm.DataFill = response[0];
            bool Resp = frm.ShowDialog() ?? false;
            Refresh(txtBuscar.Text);
        }
        // Live search: refresh on every keystroke in the search box.
        private void TextBox_KeyDown(object sender, KeyEventArgs e)
        {
            Refresh(txtBuscar.Text);
        }
        // "Delete" button: confirms with the user, deletes, and records an audit entry.
        private void btnEliminar_Click(object sender, RoutedEventArgs e)
        {
            MessageBoxResult Resp = MessageBox.Show("¿Seguro que quieres eliminar este item?", "Magicolor", MessageBoxButton.YesNo, MessageBoxImage.Warning);
            if (Resp != MessageBoxResult.Yes)
                return;
            int id = (int)((Button)sender).CommandParameter;
            var resp = Metodos.Eliminar(id);
            // "OK" is the success sentinel returned by the data layer; anything
            // else is shown to the user as an error message.
            if (resp.Equals("OK"))
            {
                MAuditoria.Insertar(new DAuditoria(
                    Menu.ActUsuario.idUsuario,
                    DAuditoria.Eliminar,
                    "Se ha eliminado el departamento Nº" + id));
                MessageBox.Show("Eliminar completado!", "SwissNet", MessageBoxButton.OK, MessageBoxImage.Information);
            }
            else MessageBox.Show(resp);
            Refresh(txtBuscar.Text);
        }
        // Placeholder emulation: hide the hint text while the search box has focus.
        private void txtBuscar_GotFocus(object sender, RoutedEventArgs e)
        {
            if (txtBuscar.Text == "")
            {
                txtBucarPlaceH.Text = "";
            }
        }
        // Placeholder emulation: restore the hint text when the empty box loses focus.
        private void txtBuscar_LostFocus(object sender, RoutedEventArgs e)
        {
            if (txtBuscar.Text == "")
            {
                txtBucarPlaceH.Text = "Buscar...";
            }
        }
        // "View" button: opens the form read-only and records a view audit entry.
        private void txtVer_Click(object sender, RoutedEventArgs e)
        {
            int id = (int)((Button)sender).CommandParameter;
            var response = Metodos.Encontrar(id);
            DepartamentoFrm frmTrab = new DepartamentoFrm();
            frmTrab.Type = TypeForm.Read;
            frmTrab.DataFill = response[0];
            MAuditoria.Insertar(new DAuditoria(
                Menu.ActUsuario.idUsuario,
                DAuditoria.Ver,
                "Se ha visualzado el departamento codigo" + response[0].codigo));
            bool Resp = frmTrab.ShowDialog() ?? false;
            Refresh(txtBuscar.Text);
        }
    }
}
| 30.595588 | 158 | 0.550108 | [
"MIT"
] | WallbangJocrod/HumanResourcesSM | HumanResourcesSM/HumanResourcesSM/Windows/DepartamentoDG.xaml.cs | 4,165 | C# |
// Copyright © 2018 Contingent Games.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Anim8orTransl8or.Gui.Properties
{
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
    {
        // Designer-generated singleton; Synchronized makes the instance safe for
        // concurrent access. Do not edit by hand — regenerate via the settings designer.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
        // Application-wide settings instance.
        public static Settings Default
        {
            get
            {
                return defaultInstance;
            }
        }
    }
}
| 42.411765 | 150 | 0.68516 | [
"MIT"
] | Enumer8/Anim8orTransl8or | Gui/Properties/Settings.Designer.cs | 2,166 | C# |
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using NServiceBus;
using Reservations.Messages.Commands;
using ServiceComposer.AspNetCore;
using System;
using System.Threading.Tasks;
namespace Reservations.ViewModelComposition
{
class ReservationsCheckoutPostHandler : IHandleRequests
{
private readonly IMessageSession messageSession;
public ReservationsCheckoutPostHandler(IMessageSession messageSession)
{
this.messageSession = messageSession;
}
public bool Matches(RouteData routeData, string httpVerb, HttpRequest request)
{
var controller = (string)routeData.Values["controller"];
var action = (string)routeData.Values["action"];
return HttpMethods.IsPost(httpVerb)
&& controller.ToLowerInvariant() == "reservations"
&& action.ToLowerInvariant() == "checkout";
}
public Task Handle(string requestId, dynamic vm, RouteData routeData, HttpRequest request)
{
/*
* In a production envronment if multiple services are interested in the
* same post request the handling logic is much more complex than what we
* are doing in this demo. In this demo both Finance and Reservations need
* to handle the POST to /reservations/checkout. The implementation assumes
* that the host/infrastructure never fails, which is not the case in a
* production environment. In order to make this part safe, which is not the
* scope of this demo asynchronous messaging should be introduced earlier in
* the processing pipeline.
*
* More information: https://milestone.topics.it/2019/05/02/safety-first.html
*/
var message = new CheckoutReservation()
{
ReservationId = new Guid(request.Cookies["reservation-id"])
};
/*
* WARN: destination is hardcoded to reduce demo complexity.
* In a production environment routing should be configured
* at startup by the host/infrastructure.
*/
return messageSession.Send("Reservations.Service", message);
}
}
}
| 39.322034 | 98 | 0.638793 | [
"MIT"
] | mauroservienti/welcome-to-the-state-machine-demos | src/Reservations.ViewModelComposition/ReservationsCheckoutPostHandler.cs | 2,322 | C# |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
namespace Microsoft.Azure.Search.Tests
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Reflection;
using Microsoft.Azure.Search.Models;
using Microsoft.Azure.Search.Tests.Utilities;
using Xunit;
public sealed class CustomAnalyzerTests : SearchTestBase<IndexFixture>
{
        // Verifies end-to-end that a custom analyzer (standard tokenizer + a char
        // filter that rewrites '@' to '_') is applied at both index and query time:
        // searching for an exact email only matches the document containing it.
        [Fact]
        public void CanSearchWithCustomAnalyzer()
        {
            Run(() =>
            {
                const string CustomAnalyzerName = "my_email_analyzer";
                const string CustomCharFilterName = "my_email_filter";
                Index index = new Index()
                {
                    Name = SearchTestUtilities.GenerateName(),
                    Fields = new[]
                    {
                        new Field("id", DataType.String) { IsKey = true },
                        new Field("message", AnalyzerName.Create(CustomAnalyzerName)) { IsSearchable = true }
                    },
                    Analyzers = new[]
                    {
                        new CustomAnalyzer()
                        {
                            Name = CustomAnalyzerName,
                            Tokenizer = TokenizerName.Standard,
                            // Mapping '@' to '_' keeps an email address as a single token.
                            CharFilters = new[] { CharFilterName.Create(CustomCharFilterName) }
                        }
                    },
                    CharFilters = new[] { new PatternReplaceCharFilter(CustomCharFilterName, "@", "_") }
                };
                Data.GetSearchServiceClient().Indexes.Create(index);
                SearchIndexClient indexClient = Data.GetSearchIndexClient(index.Name);
                var documents = new[]
                {
                    new Document() { { "id", "1" }, { "message", "My email is [email protected]." } },
                    new Document() { { "id", "2" }, { "message", "His email is [email protected]." } },
                };
                indexClient.Documents.Index(IndexBatch.Upload(documents));
                SearchTestUtilities.WaitForIndexing();
                DocumentSearchResult result = indexClient.Documents.Search("[email protected]");
                // Only the document containing the exact address should match.
                Assert.Equal("1", result.Results.Single().Document["id"]);
            });
        }
        // Ensures every known analyzer name is accepted by the service in an index
        // definition, both on the analyzer property and (for non-language analyzers)
        // on searchAnalyzer/indexAnalyzer.
        [Fact]
        public void CanUseAllAnalyzerNamesInIndexDefinition()
        {
            Run(() =>
            {
                SearchServiceClient client = Data.GetSearchServiceClient();
                Index index =
                    new Index()
                    {
                        Name = SearchTestUtilities.GenerateName(),
                        Fields = new[] { new Field("id", DataType.String) { IsKey = true } }.ToList()
                    };
                AnalyzerName[] allAnalyzers = GetAllExtensibleEnumValues<AnalyzerName>();
                int fieldNumber = 0;
                // All analyzer names can be set on the analyzer property.
                for (int i = 0; i < allAnalyzers.Length; i++)
                {
                    // Alternate between String and Collection(String) to cover both field shapes.
                    DataType fieldType = (i % 2 == 0) ? DataType.String : DataType.Collection(DataType.String);
                    index.Fields.Add(new Field($"field{fieldNumber++}", fieldType, allAnalyzers[i]));
                }
                // Only non-language analyzer names can be set on the searchAnalyzer and indexAnalyzer properties.
                // ASSUMPTION: Only language analyzers end in .lucene or .microsoft.
                var allNonLanguageAnalyzers =
                    allAnalyzers.Where(a => !a.ToString().EndsWith(".lucene") && !a.ToString().EndsWith(".microsoft")).ToArray();
                for (int i = 0; i < allNonLanguageAnalyzers.Length; i++)
                {
                    DataType fieldType = (i % 2 == 0) ? DataType.String : DataType.Collection(DataType.String);
                    var field =
                        new Field($"field{fieldNumber++}", fieldType)
                        {
                            IsSearchable = true,
                            SearchAnalyzer = allNonLanguageAnalyzers[i],
                            IndexAnalyzer = allNonLanguageAnalyzers[i]
                        };
                    index.Fields.Add(field);
                }
                // Creation succeeding is the assertion: the service accepted every name.
                client.Indexes.Create(index);
            });
        }
        // Exercises the Analyze API: once with a named analyzer, once with an
        // explicit tokenizer + token filter + char filter combination, checking
        // the returned token text, offsets, and positions.
        [Fact]
        public void CanAnalyze()
        {
            Run(() =>
            {
                SearchServiceClient client = Data.GetSearchServiceClient();
                Index index = CreateTestIndex();
                client.Indexes.Create(index);
                var request = new AnalyzeRequest()
                {
                    Text = "One two",
                    Analyzer = AnalyzerName.Whitespace
                };
                AnalyzeResult result = client.Indexes.Analyze(index.Name, request);
                Assert.Equal(2, result.Tokens.Count);
                AssertTokenInfoEqual("One", expectedStartOffset: 0, expectedEndOffset: 3, expectedPosition: 0, actual: result.Tokens[0]);
                AssertTokenInfoEqual("two", expectedStartOffset: 4, expectedEndOffset: 7, expectedPosition: 1, actual: result.Tokens[1]);
                // "One's" loses the apostrophe suffix via the apostrophe filter and
                // "<two/>" is stripped entirely by the HTML strip char filter.
                request = new AnalyzeRequest()
                {
                    Text = "One's <two/>",
                    Tokenizer = TokenizerName.Whitespace,
                    TokenFilters = new[] { TokenFilterName.Apostrophe },
                    CharFilters = new[] { CharFilterName.HtmlStrip }
                };
                result = client.Indexes.Analyze(index.Name, request);
                Assert.Equal(1, result.Tokens.Count);
                // End offset is based on the original token, not the one emitted by the filters.
                AssertTokenInfoEqual("One", expectedStartOffset: 0, expectedEndOffset: 5, expectedPosition: 0, actual: result.Tokens[0]);
            });
        }
        // Smoke-tests the Analyze API with every known analyzer name, every
        // tokenizer name, and the full set of token filter + char filter names.
        // Success is the calls not throwing.
        [Fact]
        public void CanAnalyzeWithAllPossibleNames()
        {
            Run(() =>
            {
                SearchServiceClient client = Data.GetSearchServiceClient();
                Index index = CreateTestIndex();
                client.Indexes.Create(index);
                AnalyzerName[] allAnalyzerNames = GetAllExtensibleEnumValues<AnalyzerName>();
                var requests = allAnalyzerNames.Select(an => new AnalyzeRequest() { Text = "One two", Analyzer = an });
                foreach (var req in requests)
                {
                    client.Indexes.Analyze(index.Name, req);
                }
                TokenizerName[] allTokenizerNames = GetAllExtensibleEnumValues<TokenizerName>();
                requests = allTokenizerNames.Select(tn => new AnalyzeRequest() { Text = "One two", Tokenizer = tn });
                foreach (var req in requests)
                {
                    client.Indexes.Analyze(index.Name, req);
                }
                // All token filters and char filters can be combined in one request.
                TokenFilterName[] allTokenFilterNames = GetAllExtensibleEnumValues<TokenFilterName>();
                CharFilterName[] allCharFilterNames = GetAllExtensibleEnumValues<CharFilterName>();
                var request =
                    new AnalyzeRequest(
                        "One two",
                        tokenizer: TokenizerName.Whitespace,
                        tokenFilters: allTokenFilterNames,
                        charFilters: allCharFilterNames);
                client.Indexes.Analyze(index.Name, request);
            });
        }
        // Adding an analyzer to an existing index is rejected with 400 Bad Request
        // unless the caller opts into index downtime (see the next test).
        [Fact]
        public void AddingCustomAnalyzerThrowsCloudExceptionByDefault()
        {
            Run(() =>
            {
                SearchServiceClient client = Data.GetSearchServiceClient();
                Index index = CreateTestIndex();
                index.Analyzers = new List<Analyzer>() { new StopAnalyzer("a1") };
                client.Indexes.Create(index);
                index.Analyzers.Add(new StopAnalyzer("a2"));
                SearchAssert.ThrowsCloudException(() => client.Indexes.CreateOrUpdate(index), HttpStatusCode.BadRequest);
            });
        }
        // With allowIndexDowntime: true, an analyzer can be added to an existing
        // index, and the returned index definition reflects the new analyzer.
        [Fact]
        public void CanAddCustomAnalyzerWithIndexDowntime()
        {
            Run(() =>
            {
                SearchServiceClient client = Data.GetSearchServiceClient();
                Index index = CreateTestIndex();
                index.Analyzers = new List<Analyzer>() { new StopAnalyzer("a1") };
                client.Indexes.Create(index);
                index.Analyzers.Add(new StopAnalyzer("a2"));
                Index updatedIndex = client.Indexes.CreateOrUpdate(index, allowIndexDowntime: true);
                AssertAnalysisComponentsEqual(index, updatedIndex);
            });
        }
        // Round-trips every analysis component type (analyzers, tokenizers, token
        // filters, char filters) through index creation. Components whose optional
        // properties are left at default(T) go in one index; a second index spells
        // out the service-side defaults we expect the REST API to echo back.
        [Fact]
        public void CanCreateAllAnalysisComponents()
        {
            Run(() =>
            {
                // Declare some custom component names to use with CustomAnalyzer. All other names will be randomly generated.
                const string CustomTokenizerName = "my_tokenizer";
                const string CustomTokenFilterName = "my_tokenfilter";
                const string CustomCharFilterName = "my_charfilter";
                Index index = CreateTestIndex();
                index.Analyzers = new Analyzer[]
                {
                    new CustomAnalyzer(
                        SearchTestUtilities.GenerateName(),
                        CustomTokenizerName,
                        new TokenFilterName[] { CustomTokenFilterName },
                        new CharFilterName[] { CustomCharFilterName }),
                    new CustomAnalyzer(
                        SearchTestUtilities.GenerateName(),
                        TokenizerName.EdgeNGram),
                    new PatternAnalyzer(
                        SearchTestUtilities.GenerateName(),
                        lowerCaseTerms: false,
                        pattern: "abc",
                        flags: RegexFlags.DotAll,
                        stopwords: new[] { "the" }),
                    new StandardAnalyzer(SearchTestUtilities.GenerateName(), maxTokenLength: 100, stopwords: new[] { "the" }),
                    new StopAnalyzer(SearchTestUtilities.GenerateName(), stopwords: new[] { "the" }),
                    new StopAnalyzer(SearchTestUtilities.GenerateName())
                };
                index.Tokenizers = new Tokenizer[]
                {
                    new EdgeNGramTokenizer(CustomTokenizerName, minGram: 1, maxGram: 2), // One custom tokenizer for CustomAnalyzer above.
                    new EdgeNGramTokenizer(
                        SearchTestUtilities.GenerateName(),
                        minGram: 2,
                        maxGram: 4,
                        tokenChars: new[] { TokenCharacterKind.Letter }),
                    new NGramTokenizer(SearchTestUtilities.GenerateName(), minGram: 2, maxGram: 4, tokenChars: new[] { TokenCharacterKind.Letter }),
                    new ClassicTokenizer(SearchTestUtilities.GenerateName(), maxTokenLength: 100),
                    new KeywordTokenizerV2(SearchTestUtilities.GenerateName(), maxTokenLength: 100),
                    new MicrosoftLanguageStemmingTokenizer(
                        SearchTestUtilities.GenerateName(),
                        maxTokenLength: 100,
                        isSearchTokenizer: true,
                        language: MicrosoftStemmingTokenizerLanguage.Croatian),
                    new MicrosoftLanguageTokenizer(
                        SearchTestUtilities.GenerateName(),
                        maxTokenLength: 100,
                        isSearchTokenizer: true,
                        language: MicrosoftTokenizerLanguage.Thai),
                    new PathHierarchyTokenizerV2(
                        SearchTestUtilities.GenerateName(),
                        delimiter: ':',
                        replacement: '_',
                        maxTokenLength: 300,
                        reverseTokenOrder: true,
                        numberOfTokensToSkip: 2),
                    new PatternTokenizer(
                        SearchTestUtilities.GenerateName(),
                        pattern: ".*",
                        flags: RegexFlags.Multiline | RegexFlags.Literal,
                        group: 0),
                    new StandardTokenizerV2(SearchTestUtilities.GenerateName(), maxTokenLength: 100),
                    new UaxUrlEmailTokenizer(SearchTestUtilities.GenerateName(), maxTokenLength: 100)
                };
                index.TokenFilters = new TokenFilter[]
                {
                    new CjkBigramTokenFilter(CustomTokenFilterName), // One custom token filter for CustomAnalyzer above.
                    new CjkBigramTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        ignoreScripts: new[] { CjkBigramTokenFilterScripts.Han },
                        outputUnigrams: true),
                    new CjkBigramTokenFilter(SearchTestUtilities.GenerateName()),
                    new AsciiFoldingTokenFilter(SearchTestUtilities.GenerateName(), preserveOriginal: true),
                    new AsciiFoldingTokenFilter(SearchTestUtilities.GenerateName()),
                    new CommonGramTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        commonWords: new[] { "hello", "goodbye" },
                        ignoreCase: true,
                        useQueryMode: true),
                    new CommonGramTokenFilter(SearchTestUtilities.GenerateName(), commonWords: new[] { "at" }),
                    new DictionaryDecompounderTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        wordList: new[] { "Schadenfreude" },
                        minWordSize: 10,
                        minSubwordSize: 5,
                        maxSubwordSize: 13,
                        onlyLongestMatch: true),
                    new EdgeNGramTokenFilterV2(SearchTestUtilities.GenerateName(), minGram: 2, maxGram: 10, side: EdgeNGramTokenFilterSide.Back),
                    new ElisionTokenFilter(SearchTestUtilities.GenerateName(), articles: new[] { "a" }),
                    new ElisionTokenFilter(SearchTestUtilities.GenerateName()),
                    new KeepTokenFilter(SearchTestUtilities.GenerateName(), keepWords: new[] { "aloha" }, lowerCaseKeepWords: true),
                    new KeepTokenFilter(SearchTestUtilities.GenerateName(), keepWords: new[] { "e", "komo", "mai" }),
                    new KeywordMarkerTokenFilter(SearchTestUtilities.GenerateName(), keywords: new[] { "key", "words" }, ignoreCase: true),
                    new KeywordMarkerTokenFilter(SearchTestUtilities.GenerateName(), keywords: new[] { "essential" }),
                    new LengthTokenFilter(SearchTestUtilities.GenerateName(), min: 5, max: 10),
                    new LimitTokenFilter(SearchTestUtilities.GenerateName(), maxTokenCount: 10, consumeAllTokens: true),
                    new NGramTokenFilterV2(SearchTestUtilities.GenerateName(), minGram: 2, maxGram: 3),
                    new PatternCaptureTokenFilter(SearchTestUtilities.GenerateName(), patterns: new[] { ".*" }, preserveOriginal: false),
                    new PatternReplaceTokenFilter(SearchTestUtilities.GenerateName(), pattern: "abc", replacement: "123"),
                    new PhoneticTokenFilter(SearchTestUtilities.GenerateName(), encoder: PhoneticEncoder.Soundex, replaceOriginalTokens: false),
                    new ShingleTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        maxShingleSize: 10,
                        minShingleSize: 5,
                        outputUnigrams: false,
                        outputUnigramsIfNoShingles: true,
                        tokenSeparator: " ",
                        filterToken: "|"),
                    new SnowballTokenFilter(SearchTestUtilities.GenerateName(), SnowballTokenFilterLanguage.English),
                    new StemmerOverrideTokenFilter(SearchTestUtilities.GenerateName(), rules: new[] { "ran => run" }),
                    new StemmerTokenFilter(SearchTestUtilities.GenerateName(), StemmerTokenFilterLanguage.French),
                    new StopwordsTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        stopwords: new[] { "a", "the" },
                        ignoreCase: true,
                        removeTrailingStopWords: false),
                    new StopwordsTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        stopwordsList: StopwordsList.Italian,
                        ignoreCase: true,
                        removeTrailingStopWords: false),
                    new SynonymTokenFilter(SearchTestUtilities.GenerateName(), synonyms: new[] { "great, good" }, ignoreCase: true, expand: false),
                    new TruncateTokenFilter(SearchTestUtilities.GenerateName(), length: 10),
                    new UniqueTokenFilter(SearchTestUtilities.GenerateName(), onlyOnSamePosition: true),
                    new UniqueTokenFilter(SearchTestUtilities.GenerateName()),
                    new WordDelimiterTokenFilter(
                        SearchTestUtilities.GenerateName(),
                        generateWordParts: false,
                        generateNumberParts: false,
                        catenateWords: true,
                        catenateNumbers: true,
                        catenateAll: true,
                        splitOnCaseChange: false,
                        preserveOriginal: true,
                        splitOnNumerics: false,
                        stemEnglishPossessive: false,
                        protectedWords: new[] { "protected" })
                };
                index.CharFilters = new CharFilter[]
                {
                    new MappingCharFilter(CustomCharFilterName, mappings: new[] { "a => b" }), // One custom char filter for CustomAnalyzer above.
                    new MappingCharFilter(SearchTestUtilities.GenerateName(), mappings: new[] { "s => $", "S => $" }),
                    new PatternReplaceCharFilter(SearchTestUtilities.GenerateName(), pattern: "abc", replacement: "123")
                };
                // We have to split up analysis components into two indexes, one where any components with optional properties have defaults that
                // are zero or null (default(T)), and another where we need to specify the default values we expect to get back from the REST API.
                Func<int, string> generateSimpleName = n => string.Format(CultureInfo.InvariantCulture, "a{0}", n);
                int i = 0;
                Index indexWithSpecialDefaults = CreateTestIndex();
                indexWithSpecialDefaults.Analyzers = new Analyzer[]
                {
                    new PatternAnalyzer(generateSimpleName(i++)),
                    new StandardAnalyzer(generateSimpleName(i++))
                };
                indexWithSpecialDefaults.Tokenizers = new Tokenizer[]
                {
                    new EdgeNGramTokenizer(generateSimpleName(i++)),
                    new NGramTokenizer(generateSimpleName(i++)),
                    new ClassicTokenizer(generateSimpleName(i++)),
                    new KeywordTokenizerV2(generateSimpleName(i++)),
                    new MicrosoftLanguageStemmingTokenizer(generateSimpleName(i++)),
                    new MicrosoftLanguageTokenizer(generateSimpleName(i++)),
                    new PathHierarchyTokenizerV2(generateSimpleName(i++)),
                    new PatternTokenizer(generateSimpleName(i++)),
                    new StandardTokenizerV2(generateSimpleName(i++)),
                    new UaxUrlEmailTokenizer(generateSimpleName(i++))
                };
                indexWithSpecialDefaults.TokenFilters = new TokenFilter[]
                {
                    new DictionaryDecompounderTokenFilter(
                        generateSimpleName(i++),
                        wordList: new[] { "Bahnhof" }),
                    new EdgeNGramTokenFilterV2(generateSimpleName(i++)),
                    new LengthTokenFilter(generateSimpleName(i++)),
                    new LimitTokenFilter(generateSimpleName(i++)),
                    new NGramTokenFilterV2(generateSimpleName(i++)),
                    new PatternCaptureTokenFilter(generateSimpleName(i++), patterns: new[] { "[a-z]*" }),
                    new PhoneticTokenFilter(generateSimpleName(i++)),
                    new ShingleTokenFilter(generateSimpleName(i++)),
                    new StopwordsTokenFilter(generateSimpleName(i++)),
                    new SynonymTokenFilter(generateSimpleName(i++), synonyms: new[] { "mutt, canine => dog" }),
                    new TruncateTokenFilter(generateSimpleName(i++)),
                    new WordDelimiterTokenFilter(generateSimpleName(i++))
                };
                // Reset the name counter so the expected index uses the same component names.
                i = 0;
                Index expectedIndexWithSpecialDefaults = CreateTestIndex();
                expectedIndexWithSpecialDefaults.Name = indexWithSpecialDefaults.Name;
                expectedIndexWithSpecialDefaults.Analyzers = new Analyzer[]
                {
                    new PatternAnalyzer(generateSimpleName(i++), lowerCaseTerms: true, pattern: @"\W+"),
                    new StandardAnalyzer(generateSimpleName(i++), maxTokenLength: 255)
                };
                expectedIndexWithSpecialDefaults.Tokenizers = new Tokenizer[]
                {
                    new EdgeNGramTokenizer(generateSimpleName(i++), minGram: 1, maxGram: 2),
                    new NGramTokenizer(generateSimpleName(i++), minGram: 1, maxGram: 2),
                    new ClassicTokenizer(generateSimpleName(i++), maxTokenLength: 255),
                    new KeywordTokenizerV2(generateSimpleName(i++), maxTokenLength: 256),
                    new MicrosoftLanguageStemmingTokenizer(
                        generateSimpleName(i++),
                        maxTokenLength: 255,
                        isSearchTokenizer: false,
                        language: MicrosoftStemmingTokenizerLanguage.English),
                    new MicrosoftLanguageTokenizer(
                        generateSimpleName(i++),
                        maxTokenLength: 255,
                        isSearchTokenizer: false,
                        language: MicrosoftTokenizerLanguage.English),
                    new PathHierarchyTokenizerV2(generateSimpleName(i++), delimiter: '/', replacement: '/', maxTokenLength: 300),
                    new PatternTokenizer(generateSimpleName(i++), pattern: @"\W+", group: -1),
                    new StandardTokenizerV2(generateSimpleName(i++), maxTokenLength: 255),
                    new UaxUrlEmailTokenizer(generateSimpleName(i++), maxTokenLength: 255)
                };
                expectedIndexWithSpecialDefaults.TokenFilters = new TokenFilter[]
                {
                    new DictionaryDecompounderTokenFilter(
                        generateSimpleName(i++),
                        wordList: new[] { "Bahnhof" },
                        minWordSize: 5,
                        minSubwordSize: 2,
                        maxSubwordSize: 15),
                    new EdgeNGramTokenFilterV2(generateSimpleName(i++), minGram: 1, maxGram: 2, side: EdgeNGramTokenFilterSide.Front),
                    new LengthTokenFilter(generateSimpleName(i++), max: 300),
                    new LimitTokenFilter(generateSimpleName(i++), maxTokenCount: 1),
                    new NGramTokenFilterV2(generateSimpleName(i++), minGram: 1, maxGram: 2),
                    new PatternCaptureTokenFilter(generateSimpleName(i++), patterns: new[] { "[a-z]*" }, preserveOriginal: true),
                    new PhoneticTokenFilter(generateSimpleName(i++), encoder: PhoneticEncoder.Metaphone, replaceOriginalTokens: true),
                    new ShingleTokenFilter(
                        generateSimpleName(i++),
                        maxShingleSize: 2,
                        minShingleSize: 2,
                        outputUnigrams: true,
                        tokenSeparator: " ",
                        filterToken: "_"),
                    new StopwordsTokenFilter(generateSimpleName(i++), stopwordsList: StopwordsList.English, removeTrailingStopWords: true),
                    new SynonymTokenFilter(generateSimpleName(i++), synonyms: new[] { "mutt, canine => dog" }, expand: true),
                    new TruncateTokenFilter(generateSimpleName(i++), length: 300),
                    new WordDelimiterTokenFilter(
                        generateSimpleName(i++),
                        generateWordParts: true,
                        generateNumberParts: true,
                        splitOnCaseChange: true,
                        splitOnNumerics: true,
                        stemEnglishPossessive: true)
                };
                // This is to make sure we didn't forget any components in this test.
                AssertIndexContainsAllAnalysisComponents(index, indexWithSpecialDefaults);
                TestAnalysisComponents(index);
                TestAnalysisComponents(indexWithSpecialDefaults, expectedIndexWithSpecialDefaults);
            });
        }
/// <summary>
/// Verifies that an index can be created using every known tokenizer name, token filter
/// name, and char filter name via custom analyzers.
/// </summary>
[Fact]
public void CanUseAllAnalysisComponentNames()
{
    Run(() =>
    {
        TokenizerName[] tokenizerNames = GetAllExtensibleEnumValues<TokenizerName>();
        TokenFilterName[] tokenFilterNames = GetAllExtensibleEnumValues<TokenFilterName>();
        CharFilterName[] charFilterNames = GetAllExtensibleEnumValues<CharFilterName>();

        // One analyzer that references every token filter and char filter at once.
        var kitchenSinkAnalyzer =
            new CustomAnalyzer(SearchTestUtilities.GenerateName(), TokenizerName.Lowercase, tokenFilterNames, charFilterNames);

        // Plus one analyzer per tokenizer name so that every tokenizer is exercised.
        var allAnalyzers = new List<Analyzer> { kitchenSinkAnalyzer };
        foreach (TokenizerName tokenizerName in tokenizerNames)
        {
            allAnalyzers.Add(new CustomAnalyzer(SearchTestUtilities.GenerateName(), tokenizerName));
        }

        Index index = CreateTestIndex();
        index.Analyzers = allAnalyzers.ToArray();

        TestAnalysisComponents(index);
    });
}
/// <summary>
/// Verifies that a pattern analyzer can be created with every supported regex flag.
/// </summary>
[Fact]
public void CanUseAllRegexFlags()
{
    Run(() =>
    {
        // Build one pattern analyzer per known regex flag; all other options are held constant.
        Analyzer[] patternAnalyzers =
            GetAllExtensibleEnumValues<RegexFlags>()
                .Select(flag => (Analyzer)new PatternAnalyzer(SearchTestUtilities.GenerateName(), lowerCaseTerms: true, pattern: ".*", flags: flag))
                .ToArray();

        Index index = CreateTestIndex();
        index.Analyzers = patternAnalyzers;

        TestAnalysisComponents(index);
    });
}
// Currently skipped: asserts are failing (see the Skip reason). Kept so option coverage
// is not lost once the underlying issue is fixed.
[Fact(Skip = "Assert failing")]
public void CanUseAllAnalysisComponentOptions()
{
    Run(() =>
    {
        // A tokenizer exercising every TokenCharacterKind at once.
        var tokenizerWithAllTokenCharacterKinds =
            new EdgeNGramTokenizer(
                SearchTestUtilities.GenerateName(),
                minGram: 1,
                maxGram: 2,
                tokenChars: GetAllEnumValues<TokenCharacterKind>());
        // One Microsoft language tokenizer per supported language.
        Func<MicrosoftTokenizerLanguage, Tokenizer> createMicrosoftLanguageTokenizer = mtl =>
            new MicrosoftLanguageTokenizer(
                SearchTestUtilities.GenerateName(),
                maxTokenLength: 200,
                isSearchTokenizer: false,
                language: mtl);
        IEnumerable<Tokenizer> tokenizersWithAllMicrosoftLanguages =
            GetAllEnumValues<MicrosoftTokenizerLanguage>().Select(createMicrosoftLanguageTokenizer);
        // One Microsoft stemming tokenizer per supported language.
        Func<MicrosoftStemmingTokenizerLanguage, Tokenizer> createMicrosoftStemmingLanguageTokenizer = mtl =>
            new MicrosoftLanguageStemmingTokenizer(
                SearchTestUtilities.GenerateName(),
                maxTokenLength: 200,
                isSearchTokenizer: false,
                language: mtl);
        IEnumerable<Tokenizer> tokenizersWithAllMicrosoftStemmingLanguages =
            GetAllEnumValues<MicrosoftStemmingTokenizerLanguage>().Select(createMicrosoftStemmingLanguageTokenizer);
        // A CJK bigram filter that ignores every script kind.
        var tokenFilterWithAllCjkScripts =
            new CjkBigramTokenFilter(
                SearchTestUtilities.GenerateName(),
                ignoreScripts: GetAllEnumValues<CjkBigramTokenFilterScripts>(),
                outputUnigrams: true);
        // One edge n-gram filter per side (front/back).
        Func<EdgeNGramTokenFilterSide, TokenFilter> createEdgeNGramTokenFilter =
            s => new EdgeNGramTokenFilterV2(SearchTestUtilities.GenerateName(), minGram: 1, maxGram: 2, side: s);
        IEnumerable<TokenFilter> tokenFiltersWithAllEdgeNGramSides =
            GetAllEnumValues<EdgeNGramTokenFilterSide>().Select(createEdgeNGramTokenFilter);
        // One phonetic filter per encoder.
        Func<PhoneticEncoder, TokenFilter> createPhoneticTokenFilter =
            pe => new PhoneticTokenFilter(SearchTestUtilities.GenerateName(), encoder: pe, replaceOriginalTokens: false);
        IEnumerable<TokenFilter> tokenFiltersWithAllPhoneticEncoders =
            GetAllEnumValues<PhoneticEncoder>().Select(createPhoneticTokenFilter);
        // One snowball filter and one stemmer filter per language.
        IEnumerable<TokenFilter> tokenFiltersWithAllSnowballLanguages =
            GetAllEnumValues<SnowballTokenFilterLanguage>().Select(l => new SnowballTokenFilter(SearchTestUtilities.GenerateName(), l));
        IEnumerable<TokenFilter> tokenFiltersWithAllStemmerLanguages =
            GetAllEnumValues<StemmerTokenFilterLanguage>().Select(l => new StemmerTokenFilter(SearchTestUtilities.GenerateName(), l));
        // One stopwords filter per predefined stopword list.
        Func<StopwordsList, TokenFilter> createStopTokenFilter = l =>
            new StopwordsTokenFilter(
                SearchTestUtilities.GenerateName(),
                stopwordsList: l,
                ignoreCase: false,
                removeTrailingStopWords: true);
        IEnumerable<TokenFilter> tokenFiltersWithAllStopwordLists = GetAllEnumValues<StopwordsList>().Select(createStopTokenFilter);
        // Split the tokenizers and token filters into different indexes to get around the 50-item limit.
        Index index = CreateTestIndex();
        index.Tokenizers =
            new[] { tokenizerWithAllTokenCharacterKinds }
            .Concat(tokenizersWithAllMicrosoftLanguages)
            .Concat(tokenizersWithAllMicrosoftStemmingLanguages).ToArray();
        index.TokenFilters =
            new[] { tokenFilterWithAllCjkScripts }
            .Concat(tokenFiltersWithAllEdgeNGramSides)
            .Concat(tokenFiltersWithAllPhoneticEncoders)
            .Concat(tokenFiltersWithAllSnowballLanguages)
            .Concat(tokenFiltersWithAllStemmerLanguages)
            .Concat(tokenFiltersWithAllStopwordLists).ToArray();
        TestAnalysisComponents(index);
    });
}
/// <summary>
/// Test guard: fails if the given index definitions do not collectively contain at least
/// one instance of every non-deprecated analysis component type (analyzer, tokenizer,
/// token filter, char filter) exported by the client assembly. This keeps the test suite
/// honest when new component types are added to the SDK.
/// </summary>
/// <param name="indexes">The index definitions whose analysis components are inspected.</param>
private static void AssertIndexContainsAllAnalysisComponents(params Index[] indexes)
{
    // Null component lists are treated as empty.
    Func<Index, IEnumerable<Analyzer>> getAnalyzers = index => index.Analyzers ?? Enumerable.Empty<Analyzer>();
    Func<Index, IEnumerable<Tokenizer>> getTokenizers = index => index.Tokenizers ?? Enumerable.Empty<Tokenizer>();
    Func<Index, IEnumerable<TokenFilter>> getTokenFilters = index => index.TokenFilters ?? Enumerable.Empty<TokenFilter>();
    Func<Index, IEnumerable<CharFilter>> getCharFilters = index => index.CharFilters ?? Enumerable.Empty<CharFilter>();

    // All analysis component types present in a single index definition.
    Func<Index, IEnumerable<Type>> getAnalysisTypesPresentInIndex = index =>
        getAnalyzers(index).Select(a => a.GetType())
            .Concat(getTokenizers(index).Select(t => t.GetType()))
            .Concat(getTokenFilters(index).Select(tf => tf.GetType()))
            .Concat(getCharFilters(index).Select(c => c.GetType()));

    // Count how many instances of each type appear in the given index definitions.
    Dictionary<Type, int> instanceCountMap =
        indexes
            .SelectMany(getAnalysisTypesPresentInIndex)
            .GroupBy(t => t)
            .ToDictionary(g => g.Key, g => g.Count());

    IEnumerable<Type> allAnalysisComponentTypes =
        typeof(Index).GetTypeInfo().Assembly.ExportedTypes.Where(type => IsAnalysisComponentType(type) && !IsDeprecatedType(type));

    // Materialize once: the original deferred query was enumerated twice
    // (once for Any(), once while building the failure message).
    List<Type> missingTypes = allAnalysisComponentTypes.Where(t => !IsTypePresentAtLeastOnce(instanceCountMap, t)).ToList();
    if (missingTypes.Count > 0)
    {
        const string MessageFormat =
            "Logic error in test. Test must include at least one case for each analysis component type. Missing types:{0}{0}{1}";
        string message =
            String.Format(
                CultureInfo.InvariantCulture,
                MessageFormat,
                Environment.NewLine,
                String.Join(Environment.NewLine, missingTypes.Select(t => t.Name)));
        Assert.True(false, message);
    }
}
/// <summary>
/// Determines whether the given type directly derives from one of the four analysis
/// component base classes (Analyzer, Tokenizer, TokenFilter, CharFilter).
/// </summary>
private static bool IsAnalysisComponentType(Type candidateType)
{
    Type parent = candidateType.GetTypeInfo().BaseType;
    if (parent == typeof(Analyzer) || parent == typeof(Tokenizer))
    {
        return true;
    }

    return parent == typeof(TokenFilter) || parent == typeof(CharFilter);
}
/// <summary>
/// Determines whether the given type is marked [Obsolete] and should therefore be
/// excluded from exhaustiveness checks.
/// </summary>
private static bool IsDeprecatedType(Type candidateType) =>
    candidateType.GetTypeInfo().GetCustomAttribute<ObsoleteAttribute>() != null;
/// <summary>
/// Returns true if the count map records at least one instance of the given analysis type.
/// </summary>
private static bool IsTypePresentAtLeastOnce(Dictionary<Type, int> instanceCountMap, Type analysisType)
{
    int count;
    return instanceCountMap.TryGetValue(analysisType, out count) && count >= 1;
}
// Shared helper: builds the standard index definition used by these analysis tests.
private static Index CreateTestIndex() => IndexManagementTests.CreateTestIndex();
/// <summary>
/// Asserts that an analyzed token matches the expected text, character offsets, and position.
/// </summary>
private static void AssertTokenInfoEqual(
    string expectedToken, int expectedStartOffset, int expectedEndOffset, int expectedPosition, TokenInfo actual)
{
    // NotNull first so the field comparisons below cannot dereference null.
    Assert.NotNull(actual);

    // Compare each field individually so a failure pinpoints the mismatching property.
    Assert.Equal(expectedToken, actual.Token);
    Assert.Equal(expectedStartOffset, actual.StartOffset);
    Assert.Equal(expectedEndOffset, actual.EndOffset);
    Assert.Equal(expectedPosition, actual.Position);
}
/// <summary>
/// Asserts that two index definitions contain equal analysis components, comparing each
/// component individually so that test failures report the exact mismatching item.
/// </summary>
/// <param name="expected">The index definition with the expected components.</param>
/// <param name="actual">The index definition returned by the service.</param>
private static void AssertAnalysisComponentsEqual(Index expected, Index actual)
{
    // One generic helper replaces the four copy-pasted comparison loops.
    AssertAnalysisComponentListsEqual(expected.Analyzers, actual.Analyzers);
    AssertAnalysisComponentListsEqual(expected.Tokenizers, actual.Tokenizers);
    AssertAnalysisComponentListsEqual(expected.TokenFilters, actual.TokenFilters);
    AssertAnalysisComponentListsEqual(expected.CharFilters, actual.CharFilters);
}

/// <summary>
/// Asserts that two (possibly null) component lists have the same length and
/// pairwise-equal elements according to <see cref="ModelComparer{T}"/>.
/// A null list is treated as empty, matching the original behavior.
/// </summary>
private static void AssertAnalysisComponentListsEqual<T>(IList<T> expected, IList<T> actual)
{
    Assert.Equal(expected?.Count ?? 0, actual?.Count ?? 0);
    for (int i = 0; i < expected?.Count; i++)
    {
        Assert.Equal(expected[i], actual[i], new ModelComparer<T>());
    }
}
/// <summary>
/// Uses reflection to collect every declared value of an "extensible enum" type
/// (the public static fields whose type is the enum type itself).
/// </summary>
private static T[] GetAllExtensibleEnumValues<T>() where T : ExtensibleEnum<T> =>
    typeof(T).GetFields()
        .Where(field => field.IsStatic && field.FieldType == typeof(T))
        .Select(field => (T)field.GetValue(null))
        .ToArray(); // Force eager evaluation.
/// <summary>Returns every defined value of the CLR enum type <typeparamref name="T"/>.</summary>
private static T[] GetAllEnumValues<T>() where T : struct
{
    return Enum.GetValues(typeof(T)).OfType<T>().ToArray();
}
/// <summary>
/// If the index definition has too many analysis components to create in one request,
/// splits it into multiple smaller index definitions, each holding one batch of a single
/// component kind. Otherwise yields the original index unchanged.
/// </summary>
private static IEnumerable<Index> SplitIndex(Index index)
{
    // Materialize each group list exactly once. SplitAnalysisComponents is a lazy
    // iterator, and the original code enumerated it twice (Count() and then foreach).
    List<IEnumerable<Analyzer>> analyzerGroups = SplitAnalysisComponents(index.Analyzers).ToList();
    List<IEnumerable<Tokenizer>> tokenizerGroups = SplitAnalysisComponents(index.Tokenizers).ToList();
    List<IEnumerable<TokenFilter>> tokenFilterGroups = SplitAnalysisComponents(index.TokenFilters).ToList();
    List<IEnumerable<CharFilter>> charFilterGroups = SplitAnalysisComponents(index.CharFilters).ToList();

    // Number of batches of the most heavily split component kind.
    int mostGroupsOfAnyKind =
        new[] { analyzerGroups.Count, tokenizerGroups.Count, tokenFilterGroups.Count, charFilterGroups.Count }.Max();
    if (mostGroupsOfAnyKind == 1)
    {
        // No splitting necessary; return the original index.
        yield return index;
        yield break;
    }

    foreach (var analyzers in analyzerGroups)
    {
        Index smallerIndex = CreateTestIndex();
        smallerIndex.Analyzers = analyzers.ToArray();
        yield return smallerIndex;
    }
    foreach (var tokenizers in tokenizerGroups)
    {
        Index smallerIndex = CreateTestIndex();
        smallerIndex.Tokenizers = tokenizers.ToArray();
        yield return smallerIndex;
    }
    foreach (var tokenFilters in tokenFilterGroups)
    {
        Index smallerIndex = CreateTestIndex();
        smallerIndex.TokenFilters = tokenFilters.ToArray();
        yield return smallerIndex;
    }
    foreach (var charFilters in charFilterGroups)
    {
        Index smallerIndex = CreateTestIndex();
        smallerIndex.CharFilters = charFilters.ToArray();
        yield return smallerIndex;
    }
}
/// <summary>
/// Splits a sequence of analysis components into batches no larger than the service's
/// per-index component limit. A null or within-limit sequence is returned as a single
/// (possibly empty) batch, matching the original behavior.
/// </summary>
private static IEnumerable<IEnumerable<T>> SplitAnalysisComponents<T>(IEnumerable<T> components)
{
    const int AnalysisComponentLimit = 50;

    // Materialize exactly once. The original implementation re-enumerated the (possibly
    // deferred) sequence for Count(), Any(), and every chained Skip(), which is quadratic.
    List<T> all = (components ?? Enumerable.Empty<T>()).ToList();
    if (all.Count <= AnalysisComponentLimit)
    {
        yield return all;
        yield break;
    }

    for (int start = 0; start < all.Count; start += AnalysisComponentLimit)
    {
        int batchSize = Math.Min(AnalysisComponentLimit, all.Count - start);
        yield return all.GetRange(start, batchSize);
    }
}
/// <summary>
/// Creates each (possibly split) index definition in the service, asserts that the
/// analysis components round-trip as expected, and always deletes the created index.
/// </summary>
/// <param name="index">The index definition to create.</param>
/// <param name="expectedIndex">
/// The definition expected back from the service; defaults to <paramref name="index"/>.
/// </param>
private void TestAnalysisComponents(Index index, Index expectedIndex = null)
{
    if (expectedIndex == null)
    {
        expectedIndex = index;
    }

    SearchServiceClient client = Data.GetSearchServiceClient();

    // Pair each split input index with the corresponding split expected index.
    var testCases = SplitIndex(index).Zip(SplitIndex(expectedIndex), (actual, expected) => new { Actual = actual, Expected = expected });
    foreach (var testCase in testCases)
    {
        Index createdIndex = client.Indexes.Create(testCase.Actual);
        try
        {
            AssertAnalysisComponentsEqual(testCase.Expected, createdIndex);
        }
        finally
        {
            // Clean up even when the assertion fails so later tests start from a clean service.
            client.Indexes.Delete(createdIndex.Name);
        }
    }
}
}
}
| 49.101796 | 188 | 0.560659 | [
"MIT"
] | Skycloudmedua/azure-sdk-for-net | src/SDKs/Search/DataPlane/Search.Tests/Tests/CustomAnalyzerTests.cs | 41,002 | C# |
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace BlazorServerDemoApp
{
/// <summary>
/// Application entry point for the Blazor Server demo app.
/// </summary>
public class Program
{
    /// <summary>Builds and runs the web host.</summary>
    public static void Main(string[] args) => CreateHostBuilder(args).Build().Run();

    /// <summary>
    /// Creates the generic host builder configured to use <see cref="Startup"/>.
    /// </summary>
    public static IHostBuilder CreateHostBuilder(string[] args)
    {
        return Host.CreateDefaultBuilder(args)
            .ConfigureWebHostDefaults(webBuilder => webBuilder.UseStartup<Startup>());
    }
}
}
| 25.962963 | 70 | 0.649073 | [
"MIT"
] | jyotish1977/BlazorScheduler | BlazorServerDemoApp/Program.cs | 701 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NDtw.Preprocessing;
using NDtw.FeatureVector;
namespace NDtw
{
/// <summary>
/// Represents one variable of a two-series (x, y) comparison, carrying the raw data
/// wrapped as feature vectors, an optional preprocessor, and a weight.
/// </summary>
public class SeriesVariable
{
    private readonly IList<FeatureVector<IFeatureVectorsData>> _x;
    private readonly IList<FeatureVector<IFeatureVectorsData>> _y;
    private readonly string _variableName;
    private readonly IPreprocessorGeneric _preprocessor;
    private readonly double _weight;

    /// <summary>
    /// Creates the variable, wrapping every raw data point of both series in a
    /// <see cref="FeatureVector{T}"/>.
    /// </summary>
    /// <param name="x">Data points of the first series; must not be null.</param>
    /// <param name="y">Data points of the second series; must not be null.</param>
    /// <param name="variableName">Optional name of the variable.</param>
    /// <param name="preprocessor">Optional preprocessor applied by the GetPreprocessed*Series methods.</param>
    /// <param name="weight">Weight of this variable; defaults to 1.</param>
    public SeriesVariable(IList<IFeatureVectorsData> x, IList<IFeatureVectorsData> y, string variableName = null, IPreprocessorGeneric preprocessor = null, double weight = 1)
    {
        // Fail fast with a descriptive exception instead of a NullReferenceException
        // from the projection below.
        if (x == null)
            throw new ArgumentNullException("x");
        if (y == null)
            throw new ArgumentNullException("y");

        _x = x.Select(item => new FeatureVector<IFeatureVectorsData>(item)).ToList();
        _y = y.Select(item => new FeatureVector<IFeatureVectorsData>(item)).ToList();
        _variableName = variableName;
        _preprocessor = preprocessor;
        _weight = weight;
    }

    /// <summary>Gets the variable name (may be null).</summary>
    public string VariableName
    {
        get { return _variableName; }
    }

    /// <summary>Gets the weight of this variable.</summary>
    public double Weight
    {
        get { return _weight; }
    }

    /// <summary>Gets the unprocessed x series.</summary>
    public IList<FeatureVector<IFeatureVectorsData>> OriginalXSeries
    {
        get { return _x; }
    }

    /// <summary>Gets the unprocessed y series.</summary>
    public IList<FeatureVector<IFeatureVectorsData>> OriginalYSeries
    {
        get { return _y; }
    }

    /// <summary>
    /// Gets the x series run through the preprocessor, or the original series when no
    /// preprocessor was supplied.
    /// </summary>
    public IList<FeatureVector<IFeatureVectorsData>> GetPreprocessedXSeries()
    {
        return _preprocessor == null ? _x : _preprocessor.Preprocess(_x);
    }

    /// <summary>
    /// Gets the y series run through the preprocessor, or the original series when no
    /// preprocessor was supplied.
    /// </summary>
    public IList<FeatureVector<IFeatureVectorsData>> GetPreprocessedYSeries()
    {
        return _preprocessor == null ? _y : _preprocessor.Preprocess(_y);
    }
}
}
| 29.135135 | 178 | 0.591837 | [
"BSD-3-Clause"
] | ourochan/candescantNUI | NDtw/SeriesVariable.cs | 2,158 | C# |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SalesApp.Core.Logging;
using SalesApp.Core.Services.Database.Models;
namespace SalesApp.Core.Services.Database
{
/// <summary>
/// Executes raw SQL select queries through <c>DataAccess</c> inside a transaction.
/// </summary>
public class QueryRunner
{
    // Per-instance logger; used only to record swallowed query failures below.
    private ILog Logger = LogManager.Get(typeof(QueryRunner));

    public QueryRunner()
    {
    }

    /// <summary>
    /// Executes the given select <paramref name="query"/> and maps the rows to
    /// <typeparamref name="T"/>. Best-effort: any failure is logged at debug level and an
    /// empty list is returned instead of throwing.
    /// </summary>
    /// <typeparam name="T">Model type the rows are mapped to.</typeparam>
    /// <param name="query">Raw SQL text. NOTE(review): executed as-is, so callers must not
    /// embed untrusted input here.</param>
    /// <returns>The result rows, or an empty list when there are none or on error.</returns>
    public async Task<List<T>> RunQuery<T>(string query) where T : ModelBase, new()
    {
        try
        {
            List<T> list = null;
            // NOTE(review): an async lambda is passed to RunInTransactionAsync. If that API
            // expects a synchronous delegate, this lambda compiles to async void and the
            // transaction may complete before SelectQueryAsync finishes — confirm the
            // delegate type of RunInTransactionAsync.
            await DataAccess.Instance.Connection.RunInTransactionAsync(
                async tran =>
                {
                    DataAccess.Instance.StartTransaction(tran);
                    list = await DataAccess.Instance.SelectQueryAsync<T>(query);
                });
            DataAccess.Instance.CommitTransaction();
            if (list.Count < 1)
            {
                return new List<T>();
            }
            else
            {
                return list;
            }
        }
        catch (Exception e)
        {
            // Deliberate best-effort behavior: swallow, log, and return an empty result.
            Logger.Debug(e);
            return new List<T>();
        }
    }
}
} | 26.404255 | 87 | 0.476229 | [
"Apache-2.0"
] | mkopadev/salesapp | SalesApp.Core/Services/Database/QueryRunner.cs | 1,243 | C# |
using System;
using System.Collections.Generic;
namespace DurandalAuth.Domain.Models
{
// NOTE(review): appears to be a database-first scaffolded entity (partial class with
// virtual navigation properties) — if the schema changes, regenerate rather than hand-edit.
public partial class gb_fp_waste_liq_data
{
    // Property names suggest BOD5-based wastewater emission-factor data
    // (CH4 per BOD5, anaerobic fraction) — TODO confirm meanings/units against the source tables.
    public decimal bod5_wastewater { get; set; }
    public decimal bod5_wastewater_entry { get; set; }
    public decimal ch4_bod5 { get; set; }
    public decimal percent_anaerobic { get; set; }
    // Descriptors for the row's units, version, and treatment classification.
    public string units { get; set; }
    public string units_type { get; set; }
    public string version_type { get; set; }
    public string version_name { get; set; }
    public string treatment_id { get; set; }
    // Navigation properties (virtual, presumably for ORM lazy-loading proxies).
    public virtual bill_type bill_type { get; set; }
    public virtual bill_unit bill_unit { get; set; }
    public virtual gb_fp_versions gb_fp_versions { get; set; }
}
}
| 35.363636 | 66 | 0.655527 | [
"MIT"
] | benitazz/AlcmSolutions | DurandalAuth.Domain/Models/gb_fp_waste_liq_data.cs | 778 | C# |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Script.Serialization;
using System.Xml.Serialization;
using NewLife;
using NewLife.Collections;
using NewLife.Log;
using NewLife.Reflection;
using NewLife.Threading;
namespace XCode.Membership
{
/// <summary>Menu</summary>
[EntityFactory(typeof(MenuFactory))]
public partial class Menu : EntityTree<Menu>, IMenu
{
    #region 对象操作
    static Menu()
    {
        // Trigger internal static initialization of the entity type.
        new Menu();
        //EntityFactory.Register(typeof(Menu), new MenuFactory());
        //ObjectContainer.Current.AutoRegister<IMenuFactory, MenuFactory>();
        // Modules that automatically stamp user, time, and IP columns on save.
        Meta.Modules.Add<UserModule>();
        Meta.Modules.Add<TimeModule>();
        Meta.Modules.Add<IPModule>();
    }

    /// <summary>Validates the data; validation failures are reported by throwing exceptions.</summary>
    /// <param name="isNew">Whether this is a new record</param>
    public override void Valid(Boolean isNew)
    {
        if (String.IsNullOrEmpty(Name)) throw new ArgumentNullException(__.Name, _.Name.DisplayName + "不能为空!");
        base.Valid(isNew);
        if (Icon == "") Icon = null;
        // Serialize the in-memory permission dictionary back into the Permission field.
        SavePermission();
    }

    /// <summary>Overridden. Writes an audit log when Save is called; Insert and Update alone do not log.</summary>
    /// <returns></returns>
    public override Int32 Save()
    {
        // Serialize permissions first; otherwise the entity may have no dirty data
        // because no other field was modified.
        SavePermission();
        //if (Icon.IsNullOrWhiteSpace()) Icon = "";
        // Log ordering: a new entity must be saved first so its id is available for the log.
        var action = "添加";
        var isNew = IsNullKey;
        if (!isNew)
        {
            // Skip logging when nothing changed.
            if (!HasDirty) return 0;
            action = "修改";
            // The modification log must be written before saving; saving clears the dirty
            // data, which would leave the logged change set empty.
            LogProvider.Provider.WriteLog(action, this);
        }
        var result = base.Save();
        if (isNew) LogProvider.Provider.WriteLog(action, this);
        return result;
    }

    /// <summary>Delete.</summary>
    /// <returns></returns>
    protected override Int32 OnDelete()
    {
        var err = "";
        try
        {
            // Recursively delete child menus inside a single transaction.
            var rs = 0;
            using var ts = Meta.CreateTrans();
            rs += base.OnDelete();
            var ms = Childs;
            if (ms != null && ms.Count > 0)
            {
                foreach (var item in ms)
                {
                    rs += item.Delete();
                }
            }
            ts.Commit();
            return rs;
        }
        catch (Exception ex)
        {
            err = ex.Message;
            throw;
        }
        finally
        {
            // Always write the delete audit log, including the error message on failure.
            LogProvider.Provider.WriteLog("删除", this, err);
        }
    }

    /// <summary>Loads the permission dictionary after the entity is loaded.</summary>
    protected override void OnLoad()
    {
        base.OnLoad();
        // Build the permission dictionary from the serialized Permission field.
        LoadPermission();
    }

    /// <summary>Reloads the permission dictionary when the Permission field is modified.</summary>
    /// <param name="fieldName"></param>
    protected override void OnPropertyChanged(String fieldName)
    {
        base.OnPropertyChanged(fieldName);
        if (fieldName == __.Permission) LoadPermission();
    }
    #endregion

    #region 扩展属性
    /// <summary>Url with the virtual-root marker ("~") removed.</summary>
    public String Url2 => Url?.Replace("~", "");

    /// <summary>Parent menu name</summary>
    public virtual String ParentMenuName { get => Parent?.Name; set { } }

    /// <summary>Necessary menus. At least one role must own these permissions; if none does, they are granted to the system role automatically</summary>
    internal static Int32[] Necessaries
    {
        get
        {
            // Find all menus flagged Necessary; when none are flagged, every menu is treated as necessary.
            var list = FindAllWithCache();
            var list2 = list.Where(e => e.Necessary).ToList();
            if (list2.Count > 0) list = list2;
            return list.Select(e => e.ID).ToArray();
        }
    }

    /// <summary>Friendly name. Prefers the display name when available</summary>
    public String FriendName => DisplayName.IsNullOrWhiteSpace() ? Name : DisplayName;
    #endregion

    #region 扩展查询
    /// <summary>Finds a menu by id</summary>
    /// <param name="id"></param>
    /// <returns></returns>
    public static Menu FindByID(Int32 id)
    {
        if (id <= 0) return null;
        return Meta.Cache.Find(e => e.ID == id);
    }

    /// <summary>Finds a menu by name</summary>
    /// <param name="name">Name</param>
    /// <returns></returns>
    public static Menu FindByName(String name) => Meta.Cache.Find(e => e.Name.EqualIgnoreCase(name));

    /// <summary>Finds a menu by full name</summary>
    /// <param name="name">Full name</param>
    /// <returns></returns>
    public static Menu FindByFullName(String name) => Meta.Cache.Find(e => e.FullName.EqualIgnoreCase(name));

    /// <summary>Finds a menu by url</summary>
    /// <param name="url"></param>
    /// <returns></returns>
    public static Menu FindByUrl(String url) => Meta.Cache.Find(e => e.Url.EqualIgnoreCase(url));

    /// <summary>Finds a menu by name, also supporting hierarchical path lookup</summary>
    /// <param name="name">Name</param>
    /// <returns></returns>
    public static Menu FindForName(String name)
    {
        var entity = FindByName(name);
        if (entity != null) return entity;
        return Root.FindByPath(name, _.Name, _.DisplayName);
    }

    /// <summary>Finds the child menus of the specified menu</summary>
    /// <param name="id"></param>
    /// <returns></returns>
    public static List<Menu> FindAllByParentID(Int32 id) => Meta.Cache.FindAll(e => e.ParentID == id).OrderByDescending(e => e.Sort).ThenBy(e => e.ID).ToList();

    /// <summary>Gets this menu's children filtered by permission and visibility, sorted</summary>
    /// <param name="filters"></param>
    /// <param name="inclInvisible">Include invisible menus</param>
    /// <returns></returns>
    public IList<IMenu> GetSubMenus(Int32[] filters, Boolean inclInvisible = false)
    {
        var list = Childs;
        if (list == null || list.Count < 1) return new List<IMenu>();
        if (!inclInvisible) list = list.Where(e => e.Visible).ToList();
        if (list == null || list.Count < 1) return new List<IMenu>();
        return list.Where(e => filters.Contains(e.ID)).Cast<IMenu>().ToList();
    }
    #endregion

    #region 扩展操作
    /// <summary>Adds a child menu</summary>
    /// <param name="name"></param>
    /// <param name="displayName"></param>
    /// <param name="fullName"></param>
    /// <param name="url"></param>
    /// <returns></returns>
    public IMenu Add(String name, String displayName, String fullName, String url)
    {
        var entity = new Menu
        {
            Name = name,
            DisplayName = displayName,
            FullName = fullName,
            Url = url,
            ParentID = ID,
            Visible = ID == 0 || displayName != null
        };
        entity.Save();
        return entity;
    }
    #endregion

    #region 扩展权限
    /// <summary>Optional permission sub-items</summary>
    [XmlIgnore, ScriptIgnore, IgnoreDataMember]
    public Dictionary<Int32, String> Permissions { get; set; } = new Dictionary<Int32, String>();

    // Deserializes the Permission field ("resid#name,resid#name,...") into the Permissions dictionary.
    private void LoadPermission()
    {
        Permissions.Clear();
        if (String.IsNullOrEmpty(Permission)) return;
        var dic = Permission.SplitAsDictionary("#", ",");
        foreach (var item in dic)
        {
            var resid = item.Key.ToInt();
            Permissions[resid] = item.Value;
        }
    }

    // Serializes the Permissions dictionary back into the Permission field.
    private void SavePermission()
    {
        // Do not clear the field directly: there may be no actual change, and clearing
        // would dirty the field twice and make the framework believe something changed.
        //Permission = null;
        if (Permissions.Count <= 0)
        {
            //Permission = null;
            SetItem(__.Permission, null);
            return;
        }
        var sb = Pool.StringBuilder.Get();
        // Sort by resource id ascending before serializing.
        foreach (var item in Permissions.OrderBy(e => e.Key))
        {
            if (sb.Length > 0) sb.Append(',');
            sb.AppendFormat("{0}#{1}", item.Key, item.Value);
        }
        SetItem(__.Permission, sb.Put(true));
    }
    #endregion

    #region 日志
    ///// <summary>写日志</summary>
    ///// <param name="action">操作</param>
    ///// <param name="remark">备注</param>
    //public static void WriteLog(String action, String remark) => LogProvider.Provider.WriteLog(typeof(Menu), action, remark);
    #endregion

    #region 辅助
    /// <summary>Overridden. Returns the full hierarchical path when available.</summary>
    /// <returns></returns>
    public override String ToString()
    {
        var path = GetFullPath(true, "\\", e => e.FriendName);
        if (!path.IsNullOrEmpty()) return path;
        return FriendName;
    }
    #endregion

    #region IMenu 成员
    /// <summary>Gets the full-path entities ordered from the root downward</summary>
    /// <param name="includeSelf">Whether to include this menu itself</param>
    /// <param name="separator">Separator</param>
    /// <param name="func">Callback producing the text for each node</param>
    /// <returns></returns>
    String IMenu.GetFullPath(Boolean includeSelf, String separator, Func<IMenu, String> func)
    {
        Func<Menu, String> d = null;
        if (func != null) d = item => func(item);
        return GetFullPath(includeSelf, separator, d);
    }

    //IMenu IMenu.Add(String name, String displayName, String fullName, String url) => Add(name, displayName, fullName, url);

    /// <summary>Parent menu</summary>
    IMenu IMenu.Parent => Parent;

    /// <summary>Child menus</summary>
    IList<IMenu> IMenu.Childs => Childs.OfType<IMenu>().ToList();

    /// <summary>All descendant menus</summary>
    IList<IMenu> IMenu.AllChilds => AllChilds.OfType<IMenu>().ToList();

    /// <summary>Finds a menu by hierarchical path</summary>
    /// <param name="path">Hierarchical path</param>
    /// <returns></returns>
    IMenu IMenu.FindByPath(String path) => FindByPath(path, _.Name, _.DisplayName);
    #endregion

    #region 菜单工厂
    /// <summary>Menu factory</summary>
    public class MenuFactory : DefaultEntityFactory, IMenuFactory
    {
        #region IMenuFactory 成员
        IMenu IMenuFactory.Root => Root;

        /// <summary>Finds a menu by id</summary>
        /// <param name="id"></param>
        /// <returns></returns>
        IMenu IMenuFactory.FindByID(Int32 id) => FindByID(id);

        /// <summary>Finds a menu by url</summary>
        /// <param name="url"></param>
        /// <returns></returns>
        IMenu IMenuFactory.FindByUrl(String url) => FindByUrl(url);

        /// <summary>Finds a menu by full name</summary>
        /// <param name="fullName"></param>
        /// <returns></returns>
        IMenu IMenuFactory.FindByFullName(String fullName) => FindByFullName(fullName);

        /// <summary>Gets the child menus under the given menu that the given user may access.</summary>
        /// <param name="menuid"></param>
        /// <param name="user"></param>
        /// <param name="inclInvisible">Whether to include invisible menus</param>
        /// <returns></returns>
        IList<IMenu> IMenuFactory.GetMySubMenus(Int32 menuid, IUser user, Boolean inclInvisible)
        {
            var factory = this as IMenuFactory;
            var root = factory.Root;
            // Current user's roles; no roles means no accessible menus.
            //var user = ManageProvider.Provider.Current as IUser;
            var rs = user?.Roles;
            if (rs == null || rs.Length == 0) return new List<IMenu>();
            IMenu menu = null;
            // Locate the requested menu; fall back to the root menu.
            if (menuid > 0) menu = FindByID(menuid);
            if (menu == null)
            {
                menu = root;
                if (menu == null || menu.Childs == null || menu.Childs.Count < 1) return new List<IMenu>();
            }
            return menu.GetSubMenus(rs.SelectMany(e => e.Resources).ToArray(), inclInvisible);
        }

        /// <summary>Scans controllers in the given namespace and adds them as menus</summary>
        /// <param name="rootName">Root menu name under which all menus are attached</param>
        /// <param name="asm">Assembly to scan</param>
        /// <param name="nameSpace">Namespace to scan</param>
        /// <returns></returns>
        public virtual IList<IMenu> ScanController(String rootName, Assembly asm, String nameSpace)
        {
            var list = new List<IMenu>();
            var mf = this as IMenuFactory;
            // All controller types in the target namespace.
            var types = asm.GetTypes().Where(e => e.Name.EndsWith("Controller") && e.Namespace == nameSpace).ToList();
            if (types.Count == 0) return list;
            // Create the root menu if it does not exist yet.
            var r = Root as IMenu;
            var root = mf.FindByFullName(nameSpace);
            if (root == null) root = r.FindByPath(rootName);
            //if (root == null) root = r.Childs.FirstOrDefault(e => e.Name.EqualIgnoreCase(rootName));
            //if (root == null) root = r.Childs.FirstOrDefault(e => e.Url.EqualIgnoreCase("~/" + rootName));
            if (root == null)
            {
                root = r.Add(rootName, null, nameSpace, "~/" + rootName);
                list.Add(root);
            }
            if (root.FullName != nameSpace)
            {
                root.FullName = nameSpace;
                (root as IEntity).Save();
            }
            var ms = new List<IMenu>();
            // Iterate over every controller type found in the assembly.
            foreach (var type in types)
            {
                var name = type.Name.TrimEnd("Controller");
                var url = root.Url;
                var node = root;
                // Add the controller itself as a menu node.
                var controller = node.FindByPath(name);
                if (controller == null)
                {
                    url += "/" + name;
                    controller = FindByUrl(url);
                    if (controller == null)
                    {
                        // The DisplayName attribute supplies the localized menu caption.
                        controller = node.Add(name, type.GetDisplayName(), type.FullName, url);
                        //list.Add(controller);
                    }
                }
                if (controller.FullName.IsNullOrEmpty()) controller.FullName = type.FullName;
                if (controller.Remark.IsNullOrEmpty()) controller.Remark = type.GetDescription();
                ms.Add(controller);
                list.Add(controller);
                // Reflectively invoke the controller's method to discover its actions.
                var func = type.GetMethodEx("ScanActionMenu");
                if (func == null) continue;
                // Controllers are created through IoC and cannot be instantiated directly;
                // pass null for every constructor parameter.
                var ctor = type.GetConstructors()?.FirstOrDefault();
                var ctrl = ctor.Invoke(new Object[ctor.GetParameters().Length]);
                //var ctrl = type.CreateInstance();
                var acts = func.As<Func<IMenu, IDictionary<MethodInfo, Int32>>>(ctrl).Invoke(controller);
                if (acts == null || acts.Count == 0) continue;
                // Optional permission sub-items.
                controller.Permissions.Clear();
                // Register every action on this type as an optional permission sub-item.
                foreach (var item in acts)
                {
                    var method = item.Key;
                    var dn = method.GetDisplayName();
                    if (!dn.IsNullOrEmpty()) dn = dn.Replace("{type}", (controller as Menu)?.FriendName);
                    var pmName = !dn.IsNullOrEmpty() ? dn : method.Name;
                    if (item.Value <= (Int32)PermissionFlags.Delete) pmName = ((PermissionFlags)item.Value).GetDescription();
                    controller.Permissions[item.Value] = pmName;
                }
                // Sort order, optionally taken from a static MenuOrder property on the controller.
                if (controller.Sort == 0)
                {
                    var pi = type.GetPropertyEx("MenuOrder");
                    if (pi != null) controller.Sort = pi.GetValue(null).ToInt();
                }
            }
            for (var i = 0; i < ms.Count; i++)
            {
                (ms[i] as IEntity).Save();
            }
            // If menus were added, role permissions must be re-checked in the background.
            if (list.Count > 0)
            {
                ThreadPoolX.QueueUserWorkItem(() =>
                {
                    XTrace.WriteLine("新增了菜单,需要检查权限");
                    var fact = ManageProvider.GetFactory<IRole>();
                    fact.EntityType.Invoke("CheckRole");
                });
            }
            return list;
        }
        #endregion
    }
    #endregion
}
/// <summary>Menu factory interface</summary>
public interface IMenuFactory
{
    /// <summary>Root menu</summary>
    IMenu Root { get; }

    /// <summary>Finds a menu by id</summary>
    /// <param name="id"></param>
    /// <returns></returns>
    IMenu FindByID(Int32 id);

    /// <summary>Finds a menu by full name</summary>
    /// <param name="fullName"></param>
    /// <returns></returns>
    IMenu FindByFullName(String fullName);

    /// <summary>Finds a menu by url</summary>
    /// <param name="url"></param>
    /// <returns></returns>
    IMenu FindByUrl(String url);

    /// <summary>Gets the child menus under the given menu that the given user may access.</summary>
    /// <param name="menuid"></param>
    /// <param name="user"></param>
    /// <param name="inclInvisible"></param>
    /// <returns></returns>
    IList<IMenu> GetMySubMenus(Int32 menuid, IUser user, Boolean inclInvisible);

    /// <summary>Scans controllers in the given namespace and adds them as menus</summary>
    /// <param name="rootName"></param>
    /// <param name="asm"></param>
    /// <param name="nameSpace"></param>
    /// <returns></returns>
    IList<IMenu> ScanController(String rootName, Assembly asm, String nameSpace);
}
/// <summary>Menu interface</summary>
public partial interface IMenu
{
    #region 属性
    /// <summary>ID</summary>
    Int32 ID { get; set; }
    /// <summary>Name</summary>
    String Name { get; set; }
    /// <summary>Display name</summary>
    String DisplayName { get; set; }
    /// <summary>Full name</summary>
    String FullName { get; set; }
    /// <summary>Parent menu id</summary>
    Int32 ParentID { get; set; }
    /// <summary>Link url</summary>
    String Url { get; set; }
    /// <summary>Sort order</summary>
    Int32 Sort { get; set; }
    /// <summary>Icon</summary>
    String Icon { get; set; }
    /// <summary>Visible</summary>
    Boolean Visible { get; set; }
    /// <summary>Necessary. At least one role must own these permissions; if none does, they are granted to the system role automatically</summary>
    Boolean Necessary { get; set; }
    /// <summary>Permission sub-items. Comma separated; each item's resource id and name are separated by '#'</summary>
    String Permission { get; set; }
    /// <summary>Extension field 1</summary>
    Int32 Ex1 { get; set; }
    /// <summary>Extension field 2</summary>
    Int32 Ex2 { get; set; }
    /// <summary>Extension field 3</summary>
    Double Ex3 { get; set; }
    /// <summary>Extension field 4</summary>
    String Ex4 { get; set; }
    /// <summary>Extension field 5</summary>
    String Ex5 { get; set; }
    /// <summary>Extension field 6</summary>
    String Ex6 { get; set; }
    /// <summary>Creator name</summary>
    String CreateUser { get; set; }
    /// <summary>Creator user id</summary>
    Int32 CreateUserID { get; set; }
    /// <summary>Creation IP address</summary>
    String CreateIP { get; set; }
    /// <summary>Creation time</summary>
    DateTime CreateTime { get; set; }
    /// <summary>Updater name</summary>
    String UpdateUser { get; set; }
    /// <summary>Updater user id</summary>
    Int32 UpdateUserID { get; set; }
    /// <summary>Update IP address</summary>
    String UpdateIP { get; set; }
    /// <summary>Update time</summary>
    DateTime UpdateTime { get; set; }
    /// <summary>Remark</summary>
    String Remark { get; set; }
    #endregion

    /// <summary>Gets the full-path entities ordered from the root downward</summary>
    /// <param name="includeSelf">Whether to include the menu itself</param>
    /// <param name="separator">Separator</param>
    /// <param name="func">Callback producing the text for each node</param>
    /// <returns></returns>
    String GetFullPath(Boolean includeSelf, String separator, Func<IMenu, String> func);

    /// <summary>Adds a child menu</summary>
    /// <param name="name"></param>
    /// <param name="displayName"></param>
    /// <param name="fullName"></param>
    /// <param name="url"></param>
    /// <returns></returns>
    IMenu Add(String name, String displayName, String fullName, String url);

    /// <summary>Parent menu</summary>
    IMenu Parent { get; }

    /// <summary>Child menus</summary>
    IList<IMenu> Childs { get; }

    /// <summary>All descendant menus</summary>
    IList<IMenu> AllChilds { get; }

    /// <summary>Finds a menu by hierarchical path. An instance method because the search starts below a specific menu</summary>
    /// <param name="path">Hierarchical path</param>
    /// <returns></returns>
    IMenu FindByPath(String path);

    /// <summary>Moves the menu up in sort order</summary>
    void Up();

    /// <summary>Moves the menu down in sort order</summary>
    void Down();

    /// <summary>Gets this menu's children filtered by the given resource ids</summary>
    /// <param name="filters"></param>
    /// <param name="inclInvisible">Whether to include invisible menus</param>
    /// <returns></returns>
    IList<IMenu> GetSubMenus(Int32[] filters, Boolean inclInvisible);

    /// <summary>Optional permission sub-items</summary>
    Dictionary<Int32, String> Permissions { get; }
}
} | 33.780451 | 165 | 0.483707 | [
"MIT"
] | NewLifeX/X | XCode/Membership/菜单.Biz.cs | 24,534 | C# |
using System;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Elastic.Xunit.XunitPlumbing;
using Elasticsearch.Net;
using Elasticsearch.Net.Specification.CatApi;
using FluentAssertions;
using Nest;
using Tests.Core.ManagedElasticsearch.Clusters;
using Tests.Domain;
namespace Tests.Reproduce
{
public class GithubIssue4243 : IClusterFixture<ReadOnlyCluster>
{
private readonly ReadOnlyCluster _cluster;
public GithubIssue4243(ReadOnlyCluster cluster) => _cluster = cluster;
[I]
public async Task UsingFormatJsonIsSuccessfulResponse()
{
var connectionConfiguration = new ConnectionConfiguration(_cluster.Client.ConnectionSettings.ConnectionPool);
var lowLevelClient = new ElasticLowLevelClient(connectionConfiguration);
var response = await lowLevelClient.Cat.MasterAsync<StringResponse>(new CatMasterRequestParameters { Format = "JSON" });
response.Success.Should().BeTrue();
response.ApiCall.HttpStatusCode.Should().Be(200);
response.OriginalException.Should().BeNull();
}
}
}
| 30.588235 | 123 | 0.800962 | [
"Apache-2.0"
] | adamralph/elasticsearch-net | tests/Tests.Reproduce/GithubIssue4243.cs | 1,040 | C# |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Keeps this object positioned at a fixed offset from the player each frame.
/// </summary>
public class FollowPlayer : MonoBehaviour
{
    // Transform to follow (typically assigned in the Unity Inspector).
    public Transform player;

    // Positional offset from the player (typically assigned in the Unity Inspector).
    public Vector3 offset;

    // Start is called before the first frame update.
    void Start()
    {
    }

    // Update is called once per frame.
    void Update()
    {
        // Place this object at the player's position plus the configured offset.
        Vector3 target = player.position + offset;
        transform.position = target;
    }
}
| 24.259259 | 65 | 0.654962 | [
"MIT"
] | alexvidzup/Sphereov0.3 | Assets/Scripts/FollowPlayer.cs | 657 | C# |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.